update SDWebImage

B. Petersen, 1 year ago
commit dff9026f26
100 changed files with 3392 additions and 2068 deletions
1. Podfile.lock (+18, -15)
2. Pods/Manifest.lock (+18, -15)
3. Pods/Pods.xcodeproj/project.pbxproj (+1423, -1404)
4. Pods/SDWebImage/README.md (+43, -2)
5. Pods/SDWebImage/SDWebImage/Core/NSButton+WebCache.m (+8, -18)
6. Pods/SDWebImage/SDWebImage/Core/SDAnimatedImage.h (+19, -0)
7. Pods/SDWebImage/SDWebImage/Core/SDAnimatedImage.m (+6, -0)
8. Pods/SDWebImage/SDWebImage/Core/SDAnimatedImagePlayer.h (+1, -0)
9. Pods/SDWebImage/SDWebImage/Core/SDAnimatedImagePlayer.m (+37, -103)
10. Pods/SDWebImage/SDWebImage/Core/SDAnimatedImageRep.h (+10, -0)
11. Pods/SDWebImage/SDWebImage/Core/SDAnimatedImageRep.m (+16, -3)
12. Pods/SDWebImage/SDWebImage/Core/SDCallbackQueue.m (+15, -11)
13. Pods/SDWebImage/SDWebImage/Core/SDGraphicsImageRenderer.h (+6, -0)
14. Pods/SDWebImage/SDWebImage/Core/SDGraphicsImageRenderer.m (+12, -4)
15. Pods/SDWebImage/SDWebImage/Core/SDImageCache.h (+3, -1)
16. Pods/SDWebImage/SDWebImage/Core/SDImageCache.m (+21, -9)
17. Pods/SDWebImage/SDWebImage/Core/SDImageCacheDefine.m (+18, -12)
18. Pods/SDWebImage/SDWebImage/Core/SDImageCoder.h (+13, -0)
19. Pods/SDWebImage/SDWebImage/Core/SDImageCoder.m (+1, -0)
20. Pods/SDWebImage/SDWebImage/Core/SDImageCoderHelper.h (+76, -2)
21. Pods/SDWebImage/SDWebImage/Core/SDImageCoderHelper.m (+245, -55)
22. Pods/SDWebImage/SDWebImage/Core/SDImageGraphics.m (+4, -5)
23. Pods/SDWebImage/SDWebImage/Core/SDImageIOAnimatedCoder.m (+64, -6)
24. Pods/SDWebImage/SDWebImage/Core/SDImageLoader.m (+22, -14)
25. Pods/SDWebImage/SDWebImage/Core/SDWebImageCompat.h (+6, -0)
26. Pods/SDWebImage/SDWebImage/Core/SDWebImageDefine.h (+40, -5)
27. Pods/SDWebImage/SDWebImage/Core/SDWebImageDefine.m (+22, -14)
28. Pods/SDWebImage/SDWebImage/Core/SDWebImageDownloader.h (+7, -1)
29. Pods/SDWebImage/SDWebImage/Core/SDWebImageDownloader.m (+10, -25)
30. Pods/SDWebImage/SDWebImage/Core/SDWebImageDownloaderOperation.m (+97, -63)
31. Pods/SDWebImage/SDWebImage/Core/SDWebImageError.h (+1, -0)
32. Pods/SDWebImage/SDWebImage/Core/SDWebImageIndicator.h (+2, -0)
33. Pods/SDWebImage/SDWebImage/Core/SDWebImageIndicator.m (+19, -3)
34. Pods/SDWebImage/SDWebImage/Core/UIButton+WebCache.h (+15, -0)
35. Pods/SDWebImage/SDWebImage/Core/UIButton+WebCache.m (+35, -71)
36. Pods/SDWebImage/SDWebImage/Core/UIImage+ForceDecode.h (+6, -0)
37. Pods/SDWebImage/SDWebImage/Core/UIImage+ForceDecode.m (+1, -18)
38. Pods/SDWebImage/SDWebImage/Core/UIImage+MultiFormat.h (+1, -0)
39. Pods/SDWebImage/SDWebImage/Core/UIImage+MultiFormat.m (+13, -0)
40. Pods/SDWebImage/SDWebImage/Core/UIImage+Transform.h (+4, -0)
41. Pods/SDWebImage/SDWebImage/Core/UIImage+Transform.m (+30, -9)
42. Pods/SDWebImage/SDWebImage/Core/UIImageView+HighlightedWebCache.h (+13, -0)
43. Pods/SDWebImage/SDWebImage/Core/UIImageView+HighlightedWebCache.m (+12, -3)
44. Pods/SDWebImage/SDWebImage/Core/UIImageView+WebCache.h (+15, -0)
45. Pods/SDWebImage/SDWebImage/Core/UIImageView+WebCache.m (+11, -0)
46. Pods/SDWebImage/SDWebImage/Core/UIView+WebCache.h (+22, -8)
47. Pods/SDWebImage/SDWebImage/Core/UIView+WebCache.m (+28, -14)
48. Pods/SDWebImage/SDWebImage/Core/UIView+WebCacheOperation.m (+0, -1)
49. Pods/SDWebImage/SDWebImage/Core/UIView+WebCacheState.h (+62, -0)
50. Pods/SDWebImage/SDWebImage/Core/UIView+WebCacheState.m (+56, -0)
51. Pods/SDWebImage/SDWebImage/Private/SDDeviceHelper.m (+1, -0)
52. Pods/SDWebImage/SDWebImage/Private/SDDisplayLink.m (+8, -4)
53. Pods/SDWebImage/SDWebImage/Private/SDImageAssetManager.m (+3, -1)
54. Pods/SDWebImage/SDWebImage/Private/SDImageFramePool.h (+40, -0)
55. Pods/SDWebImage/SDWebImage/Private/SDImageFramePool.m (+164, -0)
56. Pods/SDWebImage/SDWebImage/Private/SDInternalMacros.h (+65, -0)
57. Pods/SDWebImage/WebImage/SDWebImage.h (+1, -0)
58. Pods/SDWebImageWebPCoder/README.md (+22, -0)
59. Pods/SDWebImageWebPCoder/SDWebImageWebPCoder/Classes/SDImageWebPCoder.m (+174, -23)
60. Pods/Target Support Files/Pods-DcShare/Pods-DcShare.debug.xcconfig (+1, -1)
61. Pods/Target Support Files/Pods-DcShare/Pods-DcShare.release.xcconfig (+1, -1)
62. Pods/Target Support Files/SDWebImage/SDWebImage-Info.plist (+1, -1)
63. Pods/Target Support Files/SDWebImage/SDWebImage-umbrella.h (+1, -0)
64. Pods/Target Support Files/SDWebImageWebPCoder/SDWebImageWebPCoder-Info.plist (+1, -1)
65. Pods/Target Support Files/SDWebImageWebPCoder/SDWebImageWebPCoder.debug.xcconfig (+1, -1)
66. Pods/Target Support Files/SDWebImageWebPCoder/SDWebImageWebPCoder.release.xcconfig (+1, -1)
67. Pods/Target Support Files/libwebp/libwebp-Info.plist (+1, -1)
68. Pods/Target Support Files/libwebp/libwebp-umbrella.h (+1, -0)
69. Pods/libwebp/README.md (+1, -1)
70. Pods/libwebp/sharpyuv/Makefile.am (+12, -5)
71. Pods/libwebp/sharpyuv/libsharpyuv.pc.in (+11, -0)
72. Pods/libwebp/sharpyuv/libsharpyuv.rc (+41, -0)
73. Pods/libwebp/sharpyuv/sharpyuv.c (+44, -15)
74. Pods/libwebp/sharpyuv/sharpyuv.h (+31, -9)
75. Pods/libwebp/sharpyuv/sharpyuv_cpu.c (+14, -0)
76. Pods/libwebp/sharpyuv/sharpyuv_cpu.h (+22, -0)
77. Pods/libwebp/sharpyuv/sharpyuv_csp.c (+1, -1)
78. Pods/libwebp/sharpyuv/sharpyuv_csp.h (+4, -3)
79. Pods/libwebp/sharpyuv/sharpyuv_dsp.c (+10, -8)
80. Pods/libwebp/sharpyuv/sharpyuv_dsp.h (+3, -4)
81. Pods/libwebp/sharpyuv/sharpyuv_gamma.c (+0, -1)
82. Pods/libwebp/sharpyuv/sharpyuv_gamma.h (+1, -1)
83. Pods/libwebp/sharpyuv/sharpyuv_neon.c (+4, -5)
84. Pods/libwebp/sharpyuv/sharpyuv_sse2.c (+2, -5)
85. Pods/libwebp/src/Makefile.am (+2, -2)
86. Pods/libwebp/src/dec/tree_dec.c (+2, -1)
87. Pods/libwebp/src/dec/vp8_dec.c (+2, -0)
88. Pods/libwebp/src/dec/vp8i_dec.h (+2, -2)
89. Pods/libwebp/src/dec/vp8l_dec.c (+26, -22)
90. Pods/libwebp/src/dec/vp8li_dec.h (+1, -1)
91. Pods/libwebp/src/dec/webp_dec.c (+21, -14)
92. Pods/libwebp/src/demux/Makefile.am (+1, -1)
93. Pods/libwebp/src/demux/demux.c (+2, -2)
94. Pods/libwebp/src/demux/libwebpdemux.pc.in (+2, -2)
95. Pods/libwebp/src/demux/libwebpdemux.rc (+5, -5)
96. Pods/libwebp/src/dsp/alpha_processing.c (+1, -0)
97. Pods/libwebp/src/dsp/alpha_processing_sse2.c (+6, -6)
98. Pods/libwebp/src/dsp/alpha_processing_sse41.c (+1, -1)
99. Pods/libwebp/src/dsp/cost.c (+1, -0)
100. Pods/libwebp/src/dsp/cost_neon.c (+2, -2)

+ 18 - 15
Podfile.lock

@@ -2,26 +2,29 @@ PODS:
   - CocoaLumberjack (3.8.0):
     - CocoaLumberjack/Core (= 3.8.0)
   - CocoaLumberjack/Core (3.8.0)
-  - libwebp (1.2.4):
-    - libwebp/demux (= 1.2.4)
-    - libwebp/mux (= 1.2.4)
-    - libwebp/webp (= 1.2.4)
-  - libwebp/demux (1.2.4):
+  - libwebp (1.3.2):
+    - libwebp/demux (= 1.3.2)
+    - libwebp/mux (= 1.3.2)
+    - libwebp/sharpyuv (= 1.3.2)
+    - libwebp/webp (= 1.3.2)
+  - libwebp/demux (1.3.2):
     - libwebp/webp
-  - libwebp/mux (1.2.4):
+  - libwebp/mux (1.3.2):
     - libwebp/demux
-  - libwebp/webp (1.2.4)
+  - libwebp/sharpyuv (1.3.2)
+  - libwebp/webp (1.3.2):
+    - libwebp/sharpyuv
   - ReachabilitySwift (5.0.0)
   - SCSiriWaveformView (1.1.2)
-  - SDWebImage (5.15.6):
-    - SDWebImage/Core (= 5.15.6)
-  - SDWebImage/Core (5.15.6)
+  - SDWebImage (5.18.2):
+    - SDWebImage/Core (= 5.18.2)
+  - SDWebImage/Core (5.18.2)
   - SDWebImageSVGKitPlugin (1.4.0):
     - SDWebImage/Core (~> 5.10)
     - SVGKit (~> 3.0)
-  - SDWebImageWebPCoder (0.11.0):
+  - SDWebImageWebPCoder (0.13.0):
     - libwebp (~> 1.0)
-    - SDWebImage/Core (~> 5.15)
+    - SDWebImage/Core (~> 5.17)
   - SVGKit (3.0.0):
     - CocoaLumberjack (~> 3.0)
   - Swifter (1.5.0)
@@ -64,12 +67,12 @@ CHECKOUT OPTIONS:
 
 SPEC CHECKSUMS:
   CocoaLumberjack: 78abfb691154e2a9df8ded4350d504ee19d90732
-  libwebp: f62cb61d0a484ba548448a4bd52aabf150ff6eef
+  libwebp: 1786c9f4ff8a279e4dac1e8f385004d5fc253009
   ReachabilitySwift: 985039c6f7b23a1da463388634119492ff86c825
   SCSiriWaveformView: 016392911fb442c17d6dbad68e666edb13193c02
-  SDWebImage: d47d81bea8a77187896b620dc79c3c528e8906b9
+  SDWebImage: c0de394d7cf7f9838aed1fd6bb6037654a4572e4
   SDWebImageSVGKitPlugin: 7542dd07c344ec3415ded0461a1161a6f087e0c9
-  SDWebImageWebPCoder: 295a6573c512f54ad2dd58098e64e17dcf008499
+  SDWebImageWebPCoder: af09429398d99d524cae2fe00f6f0f6e491ed102
   SVGKit: 1ad7513f8c74d9652f94ed64ddecda1a23864dea
   Swifter: aa3514bbb8df8980c118f7bb1b80f2da24e39c2b
   SwiftFormat: 4334264324e20bad415888316165bdc1fc2860bc

+ 18 - 15
Pods/Manifest.lock

@@ -2,26 +2,29 @@ PODS:
   - CocoaLumberjack (3.8.0):
     - CocoaLumberjack/Core (= 3.8.0)
   - CocoaLumberjack/Core (3.8.0)
-  - libwebp (1.2.4):
-    - libwebp/demux (= 1.2.4)
-    - libwebp/mux (= 1.2.4)
-    - libwebp/webp (= 1.2.4)
-  - libwebp/demux (1.2.4):
+  - libwebp (1.3.2):
+    - libwebp/demux (= 1.3.2)
+    - libwebp/mux (= 1.3.2)
+    - libwebp/sharpyuv (= 1.3.2)
+    - libwebp/webp (= 1.3.2)
+  - libwebp/demux (1.3.2):
     - libwebp/webp
-  - libwebp/mux (1.2.4):
+  - libwebp/mux (1.3.2):
     - libwebp/demux
-  - libwebp/webp (1.2.4)
+  - libwebp/sharpyuv (1.3.2)
+  - libwebp/webp (1.3.2):
+    - libwebp/sharpyuv
   - ReachabilitySwift (5.0.0)
   - SCSiriWaveformView (1.1.2)
-  - SDWebImage (5.15.6):
-    - SDWebImage/Core (= 5.15.6)
-  - SDWebImage/Core (5.15.6)
+  - SDWebImage (5.18.2):
+    - SDWebImage/Core (= 5.18.2)
+  - SDWebImage/Core (5.18.2)
   - SDWebImageSVGKitPlugin (1.4.0):
     - SDWebImage/Core (~> 5.10)
     - SVGKit (~> 3.0)
-  - SDWebImageWebPCoder (0.11.0):
+  - SDWebImageWebPCoder (0.13.0):
     - libwebp (~> 1.0)
-    - SDWebImage/Core (~> 5.15)
+    - SDWebImage/Core (~> 5.17)
   - SVGKit (3.0.0):
     - CocoaLumberjack (~> 3.0)
   - Swifter (1.5.0)
@@ -64,12 +67,12 @@ CHECKOUT OPTIONS:
 
 SPEC CHECKSUMS:
   CocoaLumberjack: 78abfb691154e2a9df8ded4350d504ee19d90732
-  libwebp: f62cb61d0a484ba548448a4bd52aabf150ff6eef
+  libwebp: 1786c9f4ff8a279e4dac1e8f385004d5fc253009
   ReachabilitySwift: 985039c6f7b23a1da463388634119492ff86c825
   SCSiriWaveformView: 016392911fb442c17d6dbad68e666edb13193c02
-  SDWebImage: d47d81bea8a77187896b620dc79c3c528e8906b9
+  SDWebImage: c0de394d7cf7f9838aed1fd6bb6037654a4572e4
   SDWebImageSVGKitPlugin: 7542dd07c344ec3415ded0461a1161a6f087e0c9
-  SDWebImageWebPCoder: 295a6573c512f54ad2dd58098e64e17dcf008499
+  SDWebImageWebPCoder: af09429398d99d524cae2fe00f6f0f6e491ed102
   SVGKit: 1ad7513f8c74d9652f94ed64ddecda1a23864dea
   Swifter: aa3514bbb8df8980c118f7bb1b80f2da24e39c2b
   SwiftFormat: 4334264324e20bad415888316165bdc1fc2860bc

+ 1423 - 1404
Pods/Pods.xcodeproj/project.pbxproj

File diff suppressed because it is too large


+ 43 - 2
Pods/SDWebImage/README.md

@@ -37,6 +37,16 @@ Note: `SD` is the prefix for **Simple Design** (which is the team name in Daily
 - [x] Modern Objective-C and better Swift support 
 - [x] Performances!
 
+## For Apple visionOS
+
+From 5.18.0, SDWebImage can be compiled for visionOS platform. However, it's still in beta and may contains issues unlike the stable iOS UIKit support. Welcome to have a try and [report issue](https://github.com/SDWebImage/SDWebImage/issues).
+
+To build on visionOS, currently we only support the standard Xcode integration.
+
+See `Installation with Swift Package Manager` and `Manual Installation Guide` below.
+
+Once the visionOS toolchain is stable, we will add the other package manager support later (include CocoaPods).
+
 ## Supported Image Formats
 
 - Image formats supported by Apple system (JPEG, PNG, TIFF, BMP, ...), including [GIF](https://github.com/SDWebImage/SDWebImage/wiki/Advanced-Usage#gif-coder)/[APNG](https://github.com/SDWebImage/SDWebImage/wiki/Advanced-Usage#apng-coder) animated image
@@ -279,7 +289,31 @@ let package = Package(
 
 ### Manual Installation Guide
 
-See more on [Manual install Guide](https://github.com/SDWebImage/SDWebImage/wiki/Installation-Guide#manual-installation-guide)
++ Check your command line Xcode version
+
+```
+sudo xcode-select -s /path/to/Xcode.app
+```
+
+or
+
+```
+export DEVELOPER_DIR=/path/to/Xcode.app/Contents/Developer
+```
+
++ Run the script to build frameworks
+
+```
+./Scripts/build-frameworks.sh
+```
+
++ Run the script to merge XCFramework
+
+```
+./Scripts/create-xcframework.sh
+```
+
+See more on wiki: [Manual install Guide](https://github.com/SDWebImage/SDWebImage/wiki/Installation-Guide#manual-installation-guide)
 
 ### Import headers in your source files
 
@@ -301,7 +335,14 @@ At this point your workspace should build without error. If you are having probl
 community can help you solve it.
 
 ## Data Collection Practices
-As required by the [App privacy details on the App Store](https://developer.apple.com/app-store/app-privacy-details/), here's SDWebImage's list of [Data Collection Practices](https://sdwebimage.github.io/DataCollection/index.html).
+
+From Xcode 15, we provide the new `PrivacyInfo.xcprivacy` file for privacy details, see [Describing data use in privacy manifests](https://developer.apple.com/documentation/bundleresources/privacy_manifest_files/describing_data_use_in_privacy_manifests?language=objc)
+
+You can exports the privacy report after archive your App by integrate SDWebImage via SwiftPM/XCFramework (Note: CocoaPods does not support currently).
+
+For old version, as required by the [App privacy details on the App Store](https://developer.apple.com/app-store/app-privacy-details/), here's SDWebImage's list of [Data Collection Practices](https://sdwebimage.github.io/DataCollection/index.html).
+
+
 
 ## Author
 - [Olivier Poitrey](https://github.com/rs)

+ 8 - 18
Pods/SDWebImage/SDWebImage/Core/NSButton+WebCache.m

@@ -12,11 +12,10 @@
 
 #import "objc/runtime.h"
 #import "UIView+WebCacheOperation.h"
+#import "UIView+WebCacheState.h"
 #import "UIView+WebCache.h"
 #import "SDInternalMacros.h"
 
-static NSString * const SDAlternateImageOperationKey = @"NSButtonAlternateImageOperation";
-
 @implementation NSButton (WebCache)
 
 #pragma mark - Image
@@ -59,7 +58,6 @@ static NSString * const SDAlternateImageOperationKey = @"NSButtonAlternateImageO
                    context:(nullable SDWebImageContext *)context
                   progress:(nullable SDImageLoaderProgressBlock)progressBlock
                  completed:(nullable SDExternalCompletionBlock)completedBlock {
-    self.sd_currentImageURL = url;
     [self sd_internalSetImageWithURL:url
                     placeholderImage:placeholder
                              options:options
@@ -113,15 +111,13 @@ static NSString * const SDAlternateImageOperationKey = @"NSButtonAlternateImageO
                             context:(nullable SDWebImageContext *)context
                            progress:(nullable SDImageLoaderProgressBlock)progressBlock
                           completed:(nullable SDExternalCompletionBlock)completedBlock {
-    self.sd_currentAlternateImageURL = url;
-    
     SDWebImageMutableContext *mutableContext;
     if (context) {
         mutableContext = [context mutableCopy];
     } else {
         mutableContext = [NSMutableDictionary dictionary];
     }
-    mutableContext[SDWebImageContextSetImageOperationKey] = SDAlternateImageOperationKey;
+    mutableContext[SDWebImageContextSetImageOperationKey] = @keypath(self, alternateImage);
     @weakify(self);
     [self sd_internalSetImageWithURL:url
                     placeholderImage:placeholder
@@ -142,29 +138,23 @@ static NSString * const SDAlternateImageOperationKey = @"NSButtonAlternateImageO
 #pragma mark - Cancel
 
 - (void)sd_cancelCurrentImageLoad {
-    [self sd_cancelImageLoadOperationWithKey:NSStringFromClass([self class])];
+    [self sd_cancelImageLoadOperationWithKey:nil];
 }
 
 - (void)sd_cancelCurrentAlternateImageLoad {
-    [self sd_cancelImageLoadOperationWithKey:SDAlternateImageOperationKey];
+    [self sd_cancelImageLoadOperationWithKey:@keypath(self, alternateImage)];
 }
 
-#pragma mark - Private
+#pragma mark - State
 
 - (NSURL *)sd_currentImageURL {
-    return objc_getAssociatedObject(self, @selector(sd_currentImageURL));
+    return [self sd_imageLoadStateForKey:nil].url;
 }
 
-- (void)setSd_currentImageURL:(NSURL *)sd_currentImageURL {
-    objc_setAssociatedObject(self, @selector(sd_currentImageURL), sd_currentImageURL, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
-}
+#pragma mark - Alternate State
 
 - (NSURL *)sd_currentAlternateImageURL {
-    return objc_getAssociatedObject(self, @selector(sd_currentAlternateImageURL));
-}
-
-- (void)setSd_currentAlternateImageURL:(NSURL *)sd_currentAlternateImageURL {
-    objc_setAssociatedObject(self, @selector(sd_currentAlternateImageURL), sd_currentAlternateImageURL, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
+    return [self sd_imageLoadStateForKey:@keypath(self, alternateImage)].url;
 }
 
 @end
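
The removed associated-object bookkeeping is replaced by the new `UIView+WebCacheState` category: the current image and alternate-image URLs are now read from the per-operation load state. A brief sketch of what that looks like from a caller's side; `button` is a placeholder, `@"alternateImage"` is simply the string produced by `@keypath(self, alternateImage)` above, and the SDWebImage umbrella header is assumed to be imported:

```objc
// The public accessors keep working as before:
NSURL *imageURL = button.sd_currentImageURL;
NSURL *alternateURL = button.sd_currentAlternateImageURL;

// Both now read from the shared load-state storage added in UIView+WebCacheState:
NSURL *sameURL = [button sd_imageLoadStateForKey:nil].url;                      // default key
NSURL *sameAlternateURL = [button sd_imageLoadStateForKey:@"alternateImage"].url;
```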

+ 19 - 0
Pods/SDWebImage/SDWebImage/Core/SDAnimatedImage.h

@@ -88,6 +88,7 @@
 
 /**
  Current animated image format.
+ @note This format is only valid when `animatedImageData` not nil
  */
 @property (nonatomic, assign, readonly) SDImageFormat animatedImageFormat;
 
@@ -107,8 +108,26 @@
 
 // By default, animated image frames are returned by decoding just in time without keeping into memory. But you can choose to preload them into memory as well, See the description in `SDAnimatedImage` protocol.
 // After preloaded, there is no huge difference on performance between this and UIImage's `animatedImageWithImages:duration:`. But UIImage's animation have some issues such like blanking and pausing during segue when using in `UIImageView`. It's recommend to use only if need.
+/**
+ Pre-load all animated image frame into memory. Then later frame image request can directly return the frame for index without decoding.
+ This method may be called on background thread.
+ 
+ @note If one image instance is shared by lots of imageViews, the CPU performance for large animated image will drop down because the request frame index will be random (not in order) and the decoder should take extra effort to keep it re-entrant. You can use this to reduce CPU usage if need. Attention this will consume more memory usage.
+ */
 - (void)preloadAllFrames;
+
+/**
+ Unload all animated image frame from memory if are already pre-loaded. Then later frame image request need decoding. You can use this to free up the memory usage if need.
+ */
 - (void)unloadAllFrames;
+/**
+ Returns a Boolean value indicating whether all animated image frames are already pre-loaded into memory.
+ */
 @property (nonatomic, assign, readonly, getter=isAllFramesLoaded) BOOL allFramesLoaded;
+/**
+ Return the animated image coder if the image is created with `initWithAnimatedCoder:scale:` method.
+ @note We use this with animated coder which conforms to `SDProgressiveImageCoder` for progressive animation decoding.
+ */
+@property (nonatomic, strong, readonly, nullable) id<SDAnimatedImageCoder> animatedCoder;
 
 @end
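
The expanded comments above spell out when frame pre-loading pays off (one `SDAnimatedImage` shared by many views) and what it costs (memory). A short usage sketch, assuming `gifData` holds animated image data obtained elsewhere and the SDWebImage umbrella header is imported:

```objc
SDAnimatedImage *animatedImage = [SDAnimatedImage imageWithData:gifData];

// Trade RAM for CPU when the same image instance drives many image views.
// The header notes this may be called on a background thread.
dispatch_async(dispatch_get_global_queue(QOS_CLASS_UTILITY, 0), ^{
    [animatedImage preloadAllFrames];
});

// Later, when memory pressure matters more than decode cost:
if (animatedImage.isAllFramesLoaded) {
    [animatedImage unloadAllFrames];
}
```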

+ 6 - 0
Pods/SDWebImage/SDWebImage/Core/SDAnimatedImage.m

@@ -54,9 +54,15 @@ static CGFloat SDImageScaleFromPath(NSString *string) {
 
 #if __has_include(<UIKit/UITraitCollection.h>)
 + (instancetype)imageNamed:(NSString *)name inBundle:(NSBundle *)bundle compatibleWithTraitCollection:(UITraitCollection *)traitCollection {
+#if SD_VISION
+    if (!traitCollection) {
+        traitCollection = UITraitCollection.currentTraitCollection;
+    }
+#else
     if (!traitCollection) {
         traitCollection = UIScreen.mainScreen.traitCollection;
     }
+#endif
     CGFloat scale = traitCollection.displayScale;
     return [self imageNamed:name inBundle:bundle scale:scale];
 }

+ 1 - 0
Pods/SDWebImage/SDWebImage/Core/SDAnimatedImagePlayer.h

@@ -10,6 +10,7 @@
 #import "SDWebImageCompat.h"
 #import "SDImageCoder.h"
 
+/// Animated image playback mode
 typedef NS_ENUM(NSUInteger, SDAnimatedImagePlaybackMode) {
     /**
      * From first to last frame and stop or next loop.

+ 37 - 103
Pods/SDWebImage/SDWebImage/Core/SDAnimatedImagePlayer.m

@@ -10,24 +10,24 @@
 #import "NSImage+Compatibility.h"
 #import "SDDisplayLink.h"
 #import "SDDeviceHelper.h"
+#import "SDImageFramePool.h"
 #import "SDInternalMacros.h"
 
 @interface SDAnimatedImagePlayer () {
-    SD_LOCK_DECLARE(_lock);
     NSRunLoopMode _runLoopMode;
 }
 
+@property (nonatomic, strong) SDImageFramePool *framePool;
+
 @property (nonatomic, strong, readwrite) UIImage *currentFrame;
 @property (nonatomic, assign, readwrite) NSUInteger currentFrameIndex;
 @property (nonatomic, assign, readwrite) NSUInteger currentLoopCount;
 @property (nonatomic, strong) id<SDAnimatedImageProvider> animatedProvider;
-@property (nonatomic, strong) NSMutableDictionary<NSNumber *, UIImage *> *frameBuffer;
+@property (nonatomic, assign) NSUInteger currentFrameBytes;
 @property (nonatomic, assign) NSTimeInterval currentTime;
 @property (nonatomic, assign) BOOL bufferMiss;
 @property (nonatomic, assign) BOOL needsDisplayWhenImageBecomesAvailable;
 @property (nonatomic, assign) BOOL shouldReverse;
-@property (nonatomic, assign) NSUInteger maxBufferCount;
-@property (nonatomic, strong) NSOperationQueue *fetchQueue;
 @property (nonatomic, strong) SDDisplayLink *displayLink;
 
 @end
@@ -47,10 +47,7 @@
         self.totalLoopCount = provider.animatedImageLoopCount;
         self.animatedProvider = provider;
         self.playbackRate = 1.0;
-        SD_LOCK_INIT(_lock);
-#if SD_UIKIT
-        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didReceiveMemoryWarning:) name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
-#endif
+        self.framePool = [SDImageFramePool registerProvider:provider];
     }
     return self;
 }
@@ -60,47 +57,12 @@
     return player;
 }
 
-#pragma mark - Life Cycle
-
 - (void)dealloc {
-#if SD_UIKIT
-    [[NSNotificationCenter defaultCenter] removeObserver:self name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
-#endif
-}
-
-- (void)didReceiveMemoryWarning:(NSNotification *)notification {
-    [_fetchQueue cancelAllOperations];
-    NSOperation *operation = [NSBlockOperation blockOperationWithBlock:^{
-        NSNumber *currentFrameIndex = @(self.currentFrameIndex);
-        SD_LOCK(self->_lock);
-        NSArray *keys = self.frameBuffer.allKeys;
-        // only keep the next frame for later rendering
-        for (NSNumber * key in keys) {
-            if (![key isEqualToNumber:currentFrameIndex]) {
-                [self.frameBuffer removeObjectForKey:key];
-            }
-        }
-        SD_UNLOCK(self->_lock);
-    }];
-    [_fetchQueue addOperation:operation];
+    // Dereference the frame pool, when zero the frame pool for provider will dealloc
+    [SDImageFramePool unregisterProvider:self.animatedProvider];
 }
 
 #pragma mark - Private
-- (NSOperationQueue *)fetchQueue {
-    if (!_fetchQueue) {
-        _fetchQueue = [[NSOperationQueue alloc] init];
-        _fetchQueue.maxConcurrentOperationCount = 1;
-        _fetchQueue.name = @"com.hackemist.SDAnimatedImagePlayer.fetchQueue";
-    }
-    return _fetchQueue;
-}
-
-- (NSMutableDictionary<NSNumber *,UIImage *> *)frameBuffer {
-    if (!_frameBuffer) {
-        _frameBuffer = [NSMutableDictionary dictionary];
-    }
-    return _frameBuffer;
-}
 
 - (SDDisplayLink *)displayLink {
     if (!_displayLink) {
@@ -153,11 +115,11 @@
         UIImage *posterFrame = [[UIImage alloc] initWithCGImage:image.CGImage scale:image.scale orientation:image.imageOrientation];
         #endif
         if (posterFrame) {
+            // Calculate max buffer size
+            [self calculateMaxBufferCountWithFrame:posterFrame];
             // HACK: The first frame should not check duration and immediately display
             self.needsDisplayWhenImageBecomesAvailable = YES;
-            SD_LOCK(self->_lock);
-            self.frameBuffer[@(self.currentFrameIndex)] = posterFrame;
-            SD_UNLOCK(self->_lock);
+            [self.framePool setFrame:posterFrame atIndex:self.currentFrameIndex];
         }
     }
     
@@ -174,9 +136,7 @@
 }
 
 - (void)clearFrameBuffer {
-    SD_LOCK(_lock);
-    [_frameBuffer removeAllObjects];
-    SD_UNLOCK(_lock);
+    [self.framePool removeAllFrames];
 }
 
 #pragma mark - Animation Control
@@ -184,12 +144,9 @@
     [self.displayLink start];
     // Setup frame
     [self setupCurrentFrame];
-    // Calculate max buffer size
-    [self calculateMaxBufferCount];
 }
 
 - (void)stopPlaying {
-    [_fetchQueue cancelAllOperations];
     // Using `_displayLink` here because when UIImageView dealloc, it may trigger `[self stopAnimating]`, we already release the display link in SDAnimatedImageView's dealloc method.
     [_displayLink stop];
     // We need to reset the frame status, but not trigger any handle. This can ensure next time's playing status correct.
@@ -197,7 +154,6 @@
 }
 
 - (void)pausePlaying {
-    [_fetchQueue cancelAllOperations];
     [_displayLink stop];
 }
 
@@ -259,26 +215,11 @@
     
     
     // Check if we need to display new frame firstly
-    BOOL bufferFull = NO;
     if (self.needsDisplayWhenImageBecomesAvailable) {
-        UIImage *currentFrame;
-        SD_LOCK(_lock);
-        currentFrame = self.frameBuffer[@(currentFrameIndex)];
-        SD_UNLOCK(_lock);
+        UIImage *currentFrame = [self.framePool frameAtIndex:currentFrameIndex];
         
         // Update the current frame
         if (currentFrame) {
-            SD_LOCK(_lock);
-            // Remove the frame buffer if need
-            if (self.frameBuffer.count > self.maxBufferCount) {
-                self.frameBuffer[@(currentFrameIndex)] = nil;
-            }
-            // Check whether we can stop fetch
-            if (self.frameBuffer.count == totalFrameCount) {
-                bufferFull = YES;
-            }
-            SD_UNLOCK(_lock);
-            
             // Update the current frame immediately
             self.currentFrame = currentFrame;
             [self handleFrameChange];
@@ -300,8 +241,7 @@
         if (self.currentTime < currentDuration) {
             // Current frame timestamp not reached, prefetch frame in advance.
             [self prefetchFrameAtIndex:currentFrameIndex
-                             nextIndex:nextFrameIndex
-                            bufferFull:bufferFull];
+                             nextIndex:nextFrameIndex];
             return;
         }
         
@@ -337,43 +277,29 @@
     }
     
     [self prefetchFrameAtIndex:currentFrameIndex
-                     nextIndex:nextFrameIndex
-                    bufferFull:bufferFull];
+                     nextIndex:nextFrameIndex];
 }
 
 // Check if we should prefetch next frame or current frame
 // When buffer miss, means the decode speed is slower than render speed, we fetch current miss frame
 // Or, most cases, the decode speed is faster than render speed, we fetch next frame
 - (void)prefetchFrameAtIndex:(NSUInteger)currentIndex
-                   nextIndex:(NSUInteger)nextIndex
-                  bufferFull:(BOOL)bufferFull {
+                   nextIndex:(NSUInteger)nextIndex {
     NSUInteger fetchFrameIndex = currentIndex;
     UIImage *fetchFrame = nil;
     if (!self.bufferMiss) {
         fetchFrameIndex = nextIndex;
-        SD_LOCK(_lock);
-        fetchFrame = self.frameBuffer[@(nextIndex)];
-        SD_UNLOCK(_lock);
+        fetchFrame = [self.framePool frameAtIndex:nextIndex];
     }
-    if (!fetchFrame && !bufferFull && self.fetchQueue.operationCount == 0) {
-        // Prefetch next frame in background queue
-        id<SDAnimatedImageProvider> animatedProvider = self.animatedProvider;
-        @weakify(self);
-        NSOperation *operation = [NSBlockOperation blockOperationWithBlock:^{
-            @strongify(self);
-            if (!self) {
-                return;
-            }
-            UIImage *frame = [animatedProvider animatedImageFrameAtIndex:fetchFrameIndex];
-
-            BOOL isAnimating = self.displayLink.isRunning;
-            if (isAnimating) {
-                SD_LOCK(self->_lock);
-                self.frameBuffer[@(fetchFrameIndex)] = frame;
-                SD_UNLOCK(self->_lock);
-            }
-        }];
-        [self.fetchQueue addOperation:operation];
+    BOOL bufferFull = NO;
+    if (self.framePool.currentFrameCount == self.totalFrameCount) {
+        bufferFull = YES;
+    }
+    if (!fetchFrame && !bufferFull) {
+        // Calculate max buffer size
+        [self calculateMaxBufferCountWithFrame:self.currentFrame];
+        // Prefetch next frame
+        [self.framePool prefetchFrameAtIndex:fetchFrameIndex];
     }
 }
 
@@ -390,9 +316,17 @@
 }
 
 #pragma mark - Util
-- (void)calculateMaxBufferCount {
-    NSUInteger bytes = CGImageGetBytesPerRow(self.currentFrame.CGImage) * CGImageGetHeight(self.currentFrame.CGImage);
-    if (bytes == 0) bytes = 1024;
+- (void)calculateMaxBufferCountWithFrame:(nonnull UIImage *)frame {
+    NSUInteger bytes = self.currentFrameBytes;
+    if (bytes == 0) {
+        bytes = CGImageGetBytesPerRow(frame.CGImage) * CGImageGetHeight(frame.CGImage);
+        if (bytes == 0) {
+            bytes = 1024;
+        } else {
+            // Cache since most animated image each frame bytes is the same
+            self.currentFrameBytes = bytes;
+        }
+    }
     
     NSUInteger max = 0;
     if (self.maxBufferSize > 0) {
@@ -410,7 +344,7 @@
         maxBufferCount = 1;
     }
     
-    self.maxBufferCount = maxBufferCount;
+    self.framePool.maxBufferCount = maxBufferCount;
 }
 
 + (NSString *)defaultRunLoopMode {
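
The player no longer keeps its own frame dictionary and fetch queue; frames live in the shared, provider-keyed `SDImageFramePool` (one of the new private files in this update), and the per-frame byte estimate above (`bytesPerRow * height`, cached in `currentFrameBytes`) is what sizes that pool. Callers can still bound the buffer through the public `maxBufferSize` property; a hedged sketch with hypothetical numbers, assuming `animatedImage` is an `SDAnimatedImage` created elsewhere:

```objc
// A 500x500 RGBA frame is roughly 500 * 4 * 500 ≈ 1 MB, so a 20 MB budget
// lets the pool keep on the order of 20 decoded frames around.
SDAnimatedImagePlayer *player = [SDAnimatedImagePlayer playerWithProvider:animatedImage];
player.maxBufferSize = 20 * 1024 * 1024;  // in bytes; 0 (the default) lets SDWebImage decide
[player startPlaying];
```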

+ 10 - 0
Pods/SDWebImage/SDWebImage/Core/SDAnimatedImageRep.h

@@ -10,6 +10,8 @@
 
 #if SD_MAC
 
+#import "NSData+ImageContentType.h"
+
 /**
  A subclass of `NSBitmapImageRep` to fix that GIF duration issue because `NSBitmapImageRep` will reset `NSImageCurrentFrameDuration` by using `kCGImagePropertyGIFDelayTime` but not `kCGImagePropertyGIFUnclampedDelayTime`.
  This also fix the GIF loop count issue, which will use the Netscape standard (See http://www6.uniovi.es/gifanim/gifabout.htm)  to only place once when the `kCGImagePropertyGIFLoopCount` is nil. This is what modern browser's behavior.
@@ -18,6 +20,14 @@
  */
 @interface SDAnimatedImageRep : NSBitmapImageRep
 
+/// Current animated image format.
+/// @note This format is only valid when `animatedImageData` not nil
+@property (nonatomic, assign, readonly) SDImageFormat animatedImageFormat;
+
+/// This allows to retrive the compressed data like GIF using `sd_imageData` on parent `NSImage`, without re-encoding (waste CPU and RAM)
+/// @note This is typically nonnull when you create with `initWithData:`, even it's marked as weak, because ImageIO retain it
+@property (nonatomic, readonly, nullable, weak) NSData *animatedImageData;
+
 @end
 
 #endif
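
The new `animatedImageData` property exists so the original compressed bytes can be read back from the parent `NSImage` without a re-encode. A hedged macOS sketch; `gifData` is a placeholder for GIF data loaded elsewhere:

```objc
#if SD_MAC
// Wrap the GIF data in an SDAnimatedImageRep and attach it to an NSImage.
SDAnimatedImageRep *rep = [[SDAnimatedImageRep alloc] initWithData:gifData];
NSImage *image = [[NSImage alloc] initWithSize:rep.size];
[image addRepresentation:rep];

// Later: recover the compressed data and its format without re-encoding.
NSData *originalData = rep.animatedImageData;
SDImageFormat format = rep.animatedImageFormat;  // SDImageFormatGIF for GIF input
#endif
```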

+ 16 - 3
Pods/SDWebImage/SDWebImage/Core/SDAnimatedImageRep.m

@@ -19,6 +19,7 @@
 @interface SDAnimatedImageRep ()
 /// This wrap the animated image frames for legacy animated image coder API (`encodedDataWithImage:`).
 @property (nonatomic, readwrite, weak) NSArray<SDImageFrame *> *frames;
+@property (nonatomic, assign, readwrite) SDImageFormat animatedImageFormat;
 @end
 
 @implementation SDAnimatedImageRep {
@@ -33,9 +34,12 @@
 }
 
 - (instancetype)copyWithZone:(NSZone *)zone {
-  SDAnimatedImageRep *imageRep = [super copyWithZone:zone];
-  CFRetain(imageRep->_imageSource);
-  return imageRep;
+    SDAnimatedImageRep *imageRep = [super copyWithZone:zone];
+    // super will copy all ivars
+    if (imageRep->_imageSource) {
+        CFRetain(imageRep->_imageSource);
+    }
+    return imageRep;
 }
 
 // `NSBitmapImageRep`'s `imageRepWithData:` is not designed initializer
@@ -63,15 +67,19 @@
         if (!type) {
             return self;
         }
+        _animatedImageData = data; // CGImageSource will retain the data internally, no extra copy
+        SDImageFormat format = SDImageFormatUndefined;
         if (CFStringCompare(type, kSDUTTypeGIF, 0) == kCFCompareEqualTo) {
             // GIF
             // Fix the `NSBitmapImageRep` GIF loop count calculation issue
             // Which will use 0 when there are no loop count information metadata in GIF data
+            format = SDImageFormatGIF;
             NSUInteger loopCount = [SDImageGIFCoder imageLoopCountWithSource:imageSource];
             [self setProperty:NSImageLoopCount withValue:@(loopCount)];
         } else if (CFStringCompare(type, kSDUTTypePNG, 0) == kCFCompareEqualTo) {
             // APNG
             // Do initialize about frame count, current frame/duration and loop count
+            format = SDImageFormatPNG;
             [self setProperty:NSImageFrameCount withValue:@(frameCount)];
             [self setProperty:NSImageCurrentFrame withValue:@(0)];
             NSUInteger loopCount = [SDImageAPNGCoder imageLoopCountWithSource:imageSource];
@@ -79,6 +87,7 @@
         } else if (CFStringCompare(type, kSDUTTypeHEICS, 0) == kCFCompareEqualTo) {
             // HEIC
             // Do initialize about frame count, current frame/duration and loop count
+            format = SDImageFormatHEIC;
             [self setProperty:NSImageFrameCount withValue:@(frameCount)];
             [self setProperty:NSImageCurrentFrame withValue:@(0)];
             NSUInteger loopCount = [SDImageHEICCoder imageLoopCountWithSource:imageSource];
@@ -86,11 +95,15 @@
         } else if (CFStringCompare(type, kSDUTTypeWebP, 0) == kCFCompareEqualTo) {
             // WebP
             // Do initialize about frame count, current frame/duration and loop count
+            format = SDImageFormatWebP;
             [self setProperty:NSImageFrameCount withValue:@(frameCount)];
             [self setProperty:NSImageCurrentFrame withValue:@(0)];
             NSUInteger loopCount = [SDImageAWebPCoder imageLoopCountWithSource:imageSource];
             [self setProperty:NSImageLoopCount withValue:@(loopCount)];
+        } else {
+            format = [NSData sd_imageFormatForImageData:data];
         }
+        _animatedImageFormat = format;
     }
     return self;
 }

+ 15 - 11
Pods/SDWebImage/SDWebImage/Core/SDCallbackQueue.m

@@ -20,7 +20,9 @@ static void SDReleaseBlock(void *context) {
     CFRelease(context);
 }
 
-static void inline SDSafeExecute(dispatch_queue_t _Nonnull queue, dispatch_block_t _Nonnull block, BOOL async) {
+static void SDSafeExecute(SDCallbackQueue *callbackQueue, dispatch_block_t _Nonnull block, BOOL async) {
+    // Extendc gcd queue's life cycle
+    dispatch_queue_t queue = callbackQueue.queue;
     // Special handle for main queue label only (custom queue can have the same label)
     const char *label = dispatch_queue_get_label(queue);
     if (label && label == dispatch_queue_get_label(dispatch_get_main_queue())) {
@@ -32,16 +34,18 @@ static void inline SDSafeExecute(dispatch_queue_t _Nonnull queue, dispatch_block
     }
     // Check specific to detect queue equal
     void *specific = dispatch_queue_get_specific(queue, SDCallbackQueueKey);
-    void *currentSpecific = dispatch_get_specific(SDCallbackQueueKey);
-    if (specific && currentSpecific && CFGetTypeID(specific) == CFUUIDGetTypeID() && CFGetTypeID(currentSpecific) == CFUUIDGetTypeID() && CFEqual(specific, currentSpecific)) {
-        block();
-    } else {
-        if (async) {
-            dispatch_async(queue, block);
-        } else {
-            dispatch_sync(queue, block);
+    if (specific && CFGetTypeID(specific) == CFUUIDGetTypeID()) {
+        void *currentSpecific = dispatch_get_specific(SDCallbackQueueKey);
+        if (currentSpecific && CFGetTypeID(currentSpecific) == CFUUIDGetTypeID() && CFEqual(specific, currentSpecific)) {
+            block();
+            return;
         }
     }
+    if (async) {
+        dispatch_async(queue, block);
+    } else {
+        dispatch_sync(queue, block);
+    }
 }
 
 @implementation SDCallbackQueue
@@ -82,7 +86,7 @@ static void inline SDSafeExecute(dispatch_queue_t _Nonnull queue, dispatch_block
 - (void)sync:(nonnull dispatch_block_t)block {
     switch (self.policy) {
         case SDCallbackPolicySafeExecute:
-            SDSafeExecute(self.queue, block, NO);
+            SDSafeExecute(self, block, NO);
             break;
         case SDCallbackPolicyDispatch:
             dispatch_sync(self.queue, block);
@@ -96,7 +100,7 @@ static void inline SDSafeExecute(dispatch_queue_t _Nonnull queue, dispatch_block
 - (void)async:(nonnull dispatch_block_t)block {
     switch (self.policy) {
         case SDCallbackPolicySafeExecute:
-            SDSafeExecute(self.queue, block, YES);
+            SDSafeExecute(self, block, YES);
             break;
         case SDCallbackPolicyDispatch:
             dispatch_async(self.queue, block);
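
With this change both `sync:` and `async:` go through a single `SDSafeExecute` that takes the `SDCallbackQueue` itself (keeping the underlying GCD queue alive for the call) and only treats two queues as equal when both specifics are actually CFUUIDs. Nothing changes for callers; a brief sketch of the queue as it is used elsewhere in this update (e.g. `SDCallbackQueue.mainQueue` in SDImageCache.m):

```objc
// With the default SDCallbackPolicySafeExecute, a block aimed at the main queue
// runs inline when already on the main queue and is dispatched otherwise.
[SDCallbackQueue.mainQueue async:^{
    // completion work that must end up on the main queue
}];
```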

+ 6 - 0
Pods/SDWebImage/SDWebImage/Core/SDGraphicsImageRenderer.h

@@ -17,11 +17,17 @@
  For others (macOS/watchOS or iOS/tvOS 10-), these method use the `SDImageGraphics.h` to implements the same behavior (but without dynamic bitmap support)
 */
 
+/// A closure for drawing an image.
 typedef void (^SDGraphicsImageDrawingActions)(CGContextRef _Nonnull context);
+/// Constants that specify the color range of the image renderer context.
 typedef NS_ENUM(NSInteger, SDGraphicsImageRendererFormatRange) {
+    /// The image renderer context doesn’t specify a color range.
     SDGraphicsImageRendererFormatRangeUnspecified = -1,
+    /// The system automatically chooses the image renderer context’s pixel format according to the color range of its content.
     SDGraphicsImageRendererFormatRangeAutomatic = 0,
+    /// The image renderer context supports wide color.
     SDGraphicsImageRendererFormatRangeExtended,
+    /// The image renderer context doesn’t support extended colors.
     SDGraphicsImageRendererFormatRangeStandard
 };
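
The enum above is consumed through `SDGraphicsImageRendererFormat.preferredRange`. The renderer API itself is unchanged by this diff, so the following is only an illustrative sketch of where the range setting slots in, based on the existing `SDGraphicsImageRenderer` interface:

```objc
SDGraphicsImageRendererFormat *format = [[SDGraphicsImageRendererFormat alloc] init];
format.preferredRange = SDGraphicsImageRendererFormatRangeStandard;  // skip wide color

SDGraphicsImageRenderer *renderer =
    [[SDGraphicsImageRenderer alloc] initWithSize:CGSizeMake(100, 100) format:format];
UIImage *image = [renderer imageWithActions:^(CGContextRef _Nonnull context) {
    CGContextSetRGBFillColor(context, 1, 0, 0, 1);
    CGContextFillRect(context, CGRectMake(0, 0, 100, 100));
}];
```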
 

+ 12 - 4
Pods/SDWebImage/SDWebImage/Core/SDGraphicsImageRenderer.m

@@ -72,7 +72,9 @@
 #pragma clang diagnostic push
 #pragma clang diagnostic ignored "-Wdeprecated-declarations"
 - (SDGraphicsImageRendererFormatRange)preferredRange {
-#if SD_UIKIT
+#if SD_VISION
+  return (SDGraphicsImageRendererFormatRange)self.uiformat.preferredRange;
+#elif SD_UIKIT
     if (@available(iOS 10.0, tvOS 10.10, *)) {
         if (@available(iOS 12.0, tvOS 12.0, *)) {
             return (SDGraphicsImageRendererFormatRange)self.uiformat.preferredRange;
@@ -93,7 +95,9 @@
 }
 
 - (void)setPreferredRange:(SDGraphicsImageRendererFormatRange)preferredRange {
-#if SD_UIKIT
+#if SD_VISION
+  self.uiformat.preferredRange = (UIGraphicsImageRendererFormatRange)preferredRange;
+#elif SD_UIKIT
     if (@available(iOS 10.0, tvOS 10.10, *)) {
         if (@available(iOS 12.0, tvOS 12.0, *)) {
             self.uiformat.preferredRange = (UIGraphicsImageRendererFormatRange)preferredRange;
@@ -127,7 +131,9 @@
             self.uiformat = uiformat;
         } else {
 #endif
-#if SD_WATCH
+#if SD_VISION
+            CGFloat screenScale = UITraitCollection.currentTraitCollection.displayScale;
+#elif SD_WATCH
             CGFloat screenScale = [WKInterfaceDevice currentDevice].screenScale;
 #elif SD_UIKIT
             CGFloat screenScale = [UIScreen mainScreen].scale;
@@ -167,7 +173,9 @@
             self.uiformat = uiformat;
         } else {
 #endif
-#if SD_WATCH
+#if SD_VISION
+            CGFloat screenScale = UITraitCollection.currentTraitCollection.displayScale;
+#elif SD_WATCH
             CGFloat screenScale = [WKInterfaceDevice currentDevice].screenScale;
 #elif SD_UIKIT
             CGFloat screenScale = [UIScreen mainScreen].scale;

+ 3 - 1
Pods/SDWebImage/SDWebImage/Core/SDImageCache.h

@@ -37,8 +37,10 @@ typedef NS_OPTIONS(NSUInteger, SDImageCacheOptions) {
     /**
      * By default, we will decode the image in the background during cache query and download from the network. This can help to improve performance because when rendering image on the screen, it need to be firstly decoded. But this happen on the main queue by Core Animation.
      * However, this process may increase the memory usage as well. If you are experiencing a issue due to excessive memory consumption, This flag can prevent decode the image.
+     * @note 5.14.0 introduce `SDImageCoderDecodeUseLazyDecoding`, use that for better control from codec, instead of post-processing. Which acts the similar like this option but works for SDAnimatedImage as well (this one does not)
+     * @deprecated Deprecated in v5.17.0, if you don't want force-decode, pass [.imageForceDecodePolicy] = [SDImageForceDecodePolicy.never] in context option
      */
-    SDImageCacheAvoidDecodeImage = 1 << 4,
+    SDImageCacheAvoidDecodeImage API_DEPRECATED("Use SDWebImageContextImageForceDecodePolicy instead", macos(10.10, 10.10), ios(8.0, 8.0), tvos(9.0, 9.0), watchos(2.0, 2.0)) = 1 << 4,
     /**
      * By default, we decode the animated image. This flag can force decode the first frame only and produce the static image.
      */
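
Since `SDImageCacheAvoidDecodeImage` (and its `SDWebImageAvoidDecodeImage` counterpart) is deprecated in favor of the force-decode policy, the replacement is a context option rather than a bit flag. A minimal sketch at the view level; the image view and URL are placeholders, not part of this diff:

```objc
// Instead of passing SDWebImageAvoidDecodeImage / SDImageCacheAvoidDecodeImage,
// opt out of force-decoding through the context:
SDWebImageContext *context = @{
    SDWebImageContextImageForceDecodePolicy: @(SDImageForceDecodePolicyNever)
};
[imageView sd_setImageWithURL:url
             placeholderImage:nil
                      options:0
                      context:context
                     progress:nil
                    completed:nil];
```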

+ 21 - 9
Pods/SDWebImage/SDWebImage/Core/SDImageCache.m

@@ -262,16 +262,16 @@ static NSString * _defaultDiskCacheDirectory;
             SDImageFormat format = image.sd_imageFormat;
             if (format == SDImageFormatUndefined) {
                 // If image is animated, use GIF (APNG may be better, but has bugs before macOS 10.14)
-                if (image.sd_isAnimated) {
+                if (image.sd_imageFrameCount > 1) {
                     format = SDImageFormatGIF;
                 } else {
                     // If we do not have any data to detect image format, check whether it contains alpha channel to use PNG or JPEG format
                     format = [SDImageCoderHelper CGImageContainsAlpha:image.CGImage] ? SDImageFormatPNG : SDImageFormatJPEG;
                 }
             }
-            NSData *data = [[SDImageCodersManager sharedManager] encodedDataWithImage:image format:format options:context[SDWebImageContextImageEncodeOptions]];
+            NSData *encodedData = [[SDImageCodersManager sharedManager] encodedDataWithImage:image format:format options:context[SDWebImageContextImageEncodeOptions]];
             dispatch_async(self.ioQueue, ^{
-                [self _storeImageDataToDisk:data forKey:key];
+                [self _storeImageDataToDisk:encodedData forKey:key];
                 [self _archivedDataWithImage:image forKey:key];
                 if (completionBlock) {
                     [(queue ?: SDCallbackQueue.mainQueue) async:^{
@@ -461,7 +461,7 @@ static NSString * _defaultDiskCacheDirectory;
     if (image) {
         if (options & SDImageCacheDecodeFirstFrameOnly) {
             // Ensure static image
-            if (image.sd_isAnimated) {
+            if (image.sd_imageFrameCount > 1) {
 #if SD_MAC
                 image = [[NSImage alloc] initWithCGImage:image.CGImage scale:image.scale orientation:kCGImagePropertyOrientationUp];
 #else
@@ -593,7 +593,7 @@ static NSString * _defaultDiskCacheDirectory;
     if (image) {
         if (options & SDImageCacheDecodeFirstFrameOnly) {
             // Ensure static image
-            if (image.sd_isAnimated) {
+            if (image.sd_imageFrameCount > 1) {
 #if SD_MAC
                 image = [[NSImage alloc] initWithCGImage:image.CGImage scale:image.scale orientation:kCGImagePropertyOrientationUp];
 #else
@@ -668,11 +668,17 @@ static NSString * _defaultDiskCacheDirectory;
                 // Query full size cache key which generate a thumbnail, should not write back to full size memory cache
                 shouldCacheToMomery = NO;
             }
+            // Special case: If user query image in list for the same URL, to avoid decode and write **same** image object into disk cache multiple times, we query and check memory cache here again.
+            if (shouldCacheToMomery && self.config.shouldCacheImagesInMemory) {
+                diskImage = [self.memoryCache objectForKey:key];
+            }
             // decode image data only if in-memory cache missed
-            diskImage = [self diskImageForKey:key data:diskData options:options context:context];
-            if (shouldCacheToMomery && diskImage && self.config.shouldCacheImagesInMemory) {
-                NSUInteger cost = diskImage.sd_memoryCost;
-                [self.memoryCache setObject:diskImage forKey:key cost:cost];
+            if (!diskImage) {
+                diskImage = [self diskImageForKey:key data:diskData options:options context:context];
+                if (shouldCacheToMomery && diskImage && self.config.shouldCacheImagesInMemory) {
+                    NSUInteger cost = diskImage.sd_memoryCost;
+                    [self.memoryCache setObject:diskImage forKey:key cost:cost];
+                }
             }
         }
         return diskImage;
@@ -877,6 +883,8 @@ static NSString * _defaultDiskCacheDirectory;
 }
 
 #pragma mark - Helper
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wdeprecated-declarations"
 + (SDWebImageOptions)imageOptionsFromCacheOptions:(SDImageCacheOptions)cacheOptions {
     SDWebImageOptions options = 0;
     if (cacheOptions & SDImageCacheScaleDownLargeImages) options |= SDWebImageScaleDownLargeImages;
@@ -887,6 +895,7 @@ static NSString * _defaultDiskCacheDirectory;
     
     return options;
 }
+#pragma clang diagnostic pop
 
 @end
 
@@ -898,6 +907,8 @@ static NSString * _defaultDiskCacheDirectory;
     return [self queryImageForKey:key options:options context:context cacheType:SDImageCacheTypeAll completion:completionBlock];
 }
 
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wdeprecated-declarations"
 - (id<SDWebImageOperation>)queryImageForKey:(NSString *)key options:(SDWebImageOptions)options context:(nullable SDWebImageContext *)context cacheType:(SDImageCacheType)cacheType completion:(nullable SDImageCacheQueryCompletionBlock)completionBlock {
     SDImageCacheOptions cacheOptions = 0;
     if (options & SDWebImageQueryMemoryData) cacheOptions |= SDImageCacheQueryMemoryData;
@@ -911,6 +922,7 @@ static NSString * _defaultDiskCacheDirectory;
     
     return [self queryCacheOperationForKey:key options:cacheOptions context:context cacheType:cacheType done:completionBlock];
 }
+#pragma clang diagnostic pop
 
 - (void)storeImage:(UIImage *)image imageData:(NSData *)imageData forKey:(nullable NSString *)key cacheType:(SDImageCacheType)cacheType completion:(nullable SDWebImageNoParamsBlock)completionBlock {
     [self storeImage:image imageData:imageData forKey:key options:0 context:nil cacheType:cacheType completion:completionBlock];

+ 18 - 12
Pods/SDWebImage/SDWebImage/Core/SDImageCacheDefine.m

@@ -18,14 +18,14 @@
 SDImageCoderOptions * _Nonnull SDGetDecodeOptionsFromContext(SDWebImageContext * _Nullable context, SDWebImageOptions options, NSString * _Nonnull cacheKey) {
     BOOL decodeFirstFrame = SD_OPTIONS_CONTAINS(options, SDWebImageDecodeFirstFrameOnly);
     NSNumber *scaleValue = context[SDWebImageContextImageScaleFactor];
-    CGFloat scale = scaleValue.doubleValue >= 1 ? scaleValue.doubleValue : SDImageScaleFactorForKey(cacheKey);
+    CGFloat scale = scaleValue.doubleValue >= 1 ? scaleValue.doubleValue : SDImageScaleFactorForKey(cacheKey); // Use cache key to detect scale
     NSNumber *preserveAspectRatioValue = context[SDWebImageContextImagePreserveAspectRatio];
     NSValue *thumbnailSizeValue;
     BOOL shouldScaleDown = SD_OPTIONS_CONTAINS(options, SDWebImageScaleDownLargeImages);
-    if (shouldScaleDown) {
-        CGFloat thumbnailPixels = SDImageCoderHelper.defaultScaleDownLimitBytes / 4;
-        CGFloat dimension = ceil(sqrt(thumbnailPixels));
-        thumbnailSizeValue = @(CGSizeMake(dimension, dimension));
+    NSNumber *scaleDownLimitBytesValue = context[SDWebImageContextImageScaleDownLimitBytes];
+    if (scaleDownLimitBytesValue == nil && shouldScaleDown) {
+        // Use the default limit bytes
+        scaleDownLimitBytesValue = @(SDImageCoderHelper.defaultScaleDownLimitBytes);
     }
     if (context[SDWebImageContextImageThumbnailPixelSize]) {
         thumbnailSizeValue = context[SDWebImageContextImageThumbnailPixelSize];
@@ -56,6 +56,7 @@ SDImageCoderOptions * _Nonnull SDGetDecodeOptionsFromContext(SDWebImageContext *
     mutableCoderOptions[SDImageCoderDecodeThumbnailPixelSize] = thumbnailSizeValue;
     mutableCoderOptions[SDImageCoderDecodeTypeIdentifierHint] = typeIdentifierHint;
     mutableCoderOptions[SDImageCoderDecodeFileExtensionHint] = fileExtensionHint;
+    mutableCoderOptions[SDImageCoderDecodeScaleDownLimitBytes] = scaleDownLimitBytesValue;
     
     return [mutableCoderOptions copy];
 }
@@ -70,6 +71,7 @@ void SDSetDecodeOptionsToContext(SDWebImageMutableContext * _Nonnull mutableCont
     mutableContext[SDWebImageContextImageScaleFactor] = decodeOptions[SDImageCoderDecodeScaleFactor];
     mutableContext[SDWebImageContextImagePreserveAspectRatio] = decodeOptions[SDImageCoderDecodePreserveAspectRatio];
     mutableContext[SDWebImageContextImageThumbnailPixelSize] = decodeOptions[SDImageCoderDecodeThumbnailPixelSize];
+    mutableContext[SDWebImageContextImageScaleDownLimitBytes] = decodeOptions[SDImageCoderDecodeScaleDownLimitBytes];
     
     NSString *typeIdentifierHint = decodeOptions[SDImageCoderDecodeTypeIdentifierHint];
     if (!typeIdentifierHint) {
@@ -121,15 +123,19 @@ UIImage * _Nullable SDImageCacheDecodeImageData(NSData * _Nonnull imageData, NSS
         image = [imageCoder decodedImageWithData:imageData options:coderOptions];
     }
     if (image) {
-        BOOL shouldDecode = !SD_OPTIONS_CONTAINS(options, SDWebImageAvoidDecodeImage);
-        BOOL lazyDecode = [coderOptions[SDImageCoderDecodeUseLazyDecoding] boolValue];
-        if (lazyDecode) {
-            // lazyDecode = NO means we should not forceDecode, highest priority
-            shouldDecode = NO;
+        SDImageForceDecodePolicy policy = SDImageForceDecodePolicyAutomatic;
+        NSNumber *polivyValue = context[SDWebImageContextImageForceDecodePolicy];
+        if (polivyValue != nil) {
+            policy = polivyValue.unsignedIntegerValue;
         }
-        if (shouldDecode) {
-            image = [SDImageCoderHelper decodedImageWithImage:image];
+        // TODO: Deprecated, remove in SD 6.0...
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wdeprecated-declarations"
+        if (SD_OPTIONS_CONTAINS(options, SDWebImageAvoidDecodeImage)) {
+            policy = SDImageForceDecodePolicyNever;
         }
+#pragma clang diagnostic pop
+        image = [SDImageCoderHelper decodedImageWithImage:image policy:policy];
         // assign the decode options, to let manager check whether to re-decode if needed
         image.sd_decodeOptions = coderOptions;
     }

+ 13 - 0
Pods/SDWebImage/SDWebImage/Core/SDImageCoder.h

@@ -75,6 +75,19 @@ FOUNDATION_EXPORT SDImageCoderOption _Nonnull const SDImageCoderDecodeTypeIdenti
  */
 FOUNDATION_EXPORT SDImageCoderOption _Nonnull const SDImageCoderDecodeUseLazyDecoding;
 
+/**
+ A NSUInteger value to provide the limit bytes during decoding. This can help to avoid OOM on large frame count animated image or large pixel static image when you don't know how much RAM it occupied before decoding
+ The decoder will do these logic based on limit bytes:
+ 1. Get the total frame count (static image means 1)
+ 2. Calculate the `framePixelSize` width/height to `sqrt(limitBytes / frameCount / bytesPerPixel)`, keeping aspect ratio (at least 1x1)
+ 3. If the `framePixelSize < originalImagePixelSize`, then do thumbnail decoding (see `SDImageCoderDecodeThumbnailPixelSize`) use the `framePixelSize` and `preseveAspectRatio = YES`
+ 4. Else, use the full pixel decoding (small than limit bytes)
+ 5. Whatever result, this does not effect the animated/static behavior of image. So even if you set `limitBytes = 1 && frameCount = 100`, we will stll create animated image with each frame `1x1` pixel size.
+ @note You can use the logic from `+[SDImageCoder scaledSizeWithImageSize:limitBytes:bytesPerPixel:frameCount:]`
+ @note This option has higher priority than `.decodeThumbnailPixelSize`
+ */
+FOUNDATION_EXPORT SDImageCoderOption _Nonnull const SDImageCoderDecodeScaleDownLimitBytes;
+
 // These options are for image encoding
 /**
  A Boolean value indicating whether to encode the first frame only for animated image during encoding. (NSNumber). If not provide, encode animated image if need.
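
To make the limit-bytes logic above concrete, here is a worked example with hypothetical numbers (a 4000x3000 animated image, 10 frames, RGBA8888, 64 MB budget). The same calculation is wrapped by `+[SDImageCoderHelper scaledSizeWithImageSize:limitBytes:bytesPerPixel:frameCount:]`, and the value is normally supplied through the `SDWebImageContextImageScaleDownLimitBytes` context option seen in the SDImageCacheDefine.m hunk above:

```objc
NSUInteger limitBytes = 64 * 1024 * 1024;  // 64 MB budget
NSUInteger frameCount = 10;
NSUInteger bytesPerPixel = 4;              // RGBA8888

// Step 2: per-frame pixel budget -> square thumbnail dimension
double framePixels = (double)limitBytes / frameCount / bytesPerPixel;  // ~1.68M pixels
double dimension = sqrt(framePixels);                                   // ~1295 px

// Step 3: 1295 < 4000, so each frame is thumbnail-decoded, preserving aspect
// ratio (roughly 1295x971 for a 4000x3000 source); the frame count stays 10.

// In practice, just hand the budget to the decoder via the request context:
SDWebImageContext *context = @{
    SDWebImageContextImageScaleDownLimitBytes: @(limitBytes)
};
```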

+ 1 - 0
Pods/SDWebImage/SDWebImage/Core/SDImageCoder.m

@@ -15,6 +15,7 @@ SDImageCoderOption const SDImageCoderDecodeThumbnailPixelSize = @"decodeThumbnai
 SDImageCoderOption const SDImageCoderDecodeFileExtensionHint = @"decodeFileExtensionHint";
 SDImageCoderOption const SDImageCoderDecodeTypeIdentifierHint = @"decodeTypeIdentifierHint";
 SDImageCoderOption const SDImageCoderDecodeUseLazyDecoding = @"decodeUseLazyDecoding";
+SDImageCoderOption const SDImageCoderDecodeScaleDownLimitBytes = @"decodeScaleDownLimitBytes";
 
 SDImageCoderOption const SDImageCoderEncodeFirstFrameOnly = @"encodeFirstFrameOnly";
 SDImageCoderOption const SDImageCoderEncodeCompressionQuality = @"encodeCompressionQuality";

+ 76 - 2
Pods/SDWebImage/SDWebImage/Core/SDImageCoderHelper.h

@@ -10,6 +10,7 @@
 #import "SDWebImageCompat.h"
 #import "SDImageFrame.h"
 
+/// The options controls how we force pre-draw the image (to avoid lazy-decoding). Which need OS's framework compatibility
 typedef NS_ENUM(NSUInteger, SDImageCoderDecodeSolution) {
     /// automatically choose the solution based on image format, hardware, OS version. This keep balance for compatibility and performance. Default after SDWebImage 5.13.0
     SDImageCoderDecodeSolutionAutomatic,
@@ -19,6 +20,35 @@ typedef NS_ENUM(NSUInteger, SDImageCoderDecodeSolution) {
     SDImageCoderDecodeSolutionUIKit
 };
 
+/// The policy to force-decode the origin CGImage (produced by Image Coder Plugin)
+/// Some CGImage may be lazy, or not lazy, but need extra copy to render on screen
+/// The force-decode step help to `pre-process` to get the best suitable CGImage to render, which can increase frame rate
+/// The downside is that force-decode may consume RAM and CPU, and may loss the `lazy` support (lazy CGImage can be purged when memory warning, and re-created if need), see more: `SDImageCoderDecodeUseLazyDecoding`
+typedef NS_ENUM(NSUInteger, SDImageForceDecodePolicy) {
+    /// Based on input CGImage's colorspace, alignment, bitmapinfo, if it may trigger `CA::copy_image` extra copy, we will force-decode, else don't
+    SDImageForceDecodePolicyAutomatic,
+    /// Never force decode input CGImage
+    SDImageForceDecodePolicyNever,
+    /// Always force decode input CGImage (only once)
+    SDImageForceDecodePolicyAlways
+};
+
+/// Byte alignment the bytes size with alignment
+/// - Parameters:
+///   - size: The bytes size
+///   - alignment: The alignment, in bytes
+static inline size_t SDByteAlign(size_t size, size_t alignment) {
+    return ((size + (alignment - 1)) / alignment) * alignment;
+}
+
+/// The pixel format about the information to call `CGImageCreate` suitable for current hardware rendering
+typedef struct SDImagePixelFormat {
+    /// Typically is pre-multiplied RGBA8888 for alpha image, RGBX8888 for non-alpha image.
+    CGBitmapInfo bitmapInfo;
+    /// Typically is 32, the 8 pixels bytesPerRow.
+    size_t alignment;
+} SDImagePixelFormat;
+
 /**
  Provide some common helper methods for building the image decoder/encoder.
  */
@@ -44,16 +74,31 @@ typedef NS_ENUM(NSUInteger, SDImageCoderDecodeSolution) {
  */
 + (NSArray<SDImageFrame *> * _Nullable)framesFromAnimatedImage:(UIImage * _Nullable)animatedImage NS_SWIFT_NAME(frames(from:));
 
+#pragma mark - Preferred Rendering Format
+/// For coders who use `CGImageCreate`, use the information below to create an effient CGImage which can be render on GPU without Core Animation's extra copy (`CA::Render::copy_image`), which can be debugged using `Color Copied Image` in Xcode Instruments
+/// `CGImageCreate`'s `bytesPerRow`, `space`, `bitmapInfo` params should use the information below.
 /**
  Return the shared device-dependent RGB color space. This follows The Get Rule.
- On iOS, it's created with deviceRGB (if available, use sRGB).
- On macOS, it's from the screen colorspace (if failed, use deviceRGB)
  Because it's shared, you should not retain or release this object.
+ Typically is sRGB for iOS, screen color space (like Color LCD) for macOS.
  
  @return The device-dependent RGB color space
  */
 + (CGColorSpaceRef _Nonnull)colorSpaceGetDeviceRGB CF_RETURNS_NOT_RETAINED;
 
+/**
+ Tthis returns the pixel format **Preferred from current hardward && OS using runtime detection**
+ @param containsAlpha Whether the image to render contains alpha channel
+ */
++ (SDImagePixelFormat)preferredPixelFormat:(BOOL)containsAlpha;
+
+/**
+ Check whether a CGImage is hardware-supported for rendering on screen, without triggering `CA::Render::copy_image`
+ You can debug copied images using Xcode's `Color Copied Image`; a copied image is tinted cyan and occupies double the RAM for its bitmap buffer.
+ Typically, a CGImage created using the information above (`colorspace` / `alignment` / `bitmapInfo`) can render without the copy.
+ */
++ (BOOL)CGImageIsHardwareSupported:(_Nonnull CGImageRef)cgImage;
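A short sketch of the intended check, roughly mirroring what the automatic force-decode policy does internally (`image` is a hypothetical UIImage):

    CGImageRef cgImage = image.CGImage;
    if (cgImage && ![SDImageCoderHelper CGImageIsHardwareSupported:cgImage]) {
        // Not directly renderable; force-decode once so Core Animation does not copy it.
        image = [SDImageCoderHelper decodedImageWithImage:image policy:SDImageForceDecodePolicyAlways];
    }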
+
 /**
  Check whether CGImage contains alpha channel.
  
@@ -86,6 +131,8 @@ typedef NS_ENUM(NSUInteger, SDImageCoderDecodeSolution) {
  Create a scaled CGImage from the provided CGImage and size. This follows The Create Rule and you are responsible for calling release after usage.
  It will detect whether the image size matches the scale size; if not, it stretches the image to the target size.
  @note If you need to keep aspect ratio, you can calculate the scale size by using `scaledSizeWithImageSize` first.
+ @note This scale does not change bits per component (which means RGB888 in, RGB888 out), and supports 8/16/32(float) bpc. But the method in UIImage+Transform does not guarantee this.
+ @note All supported CGImage pixel format: https://developer.apple.com/library/archive/documentation/GraphicsImaging/Conceptual/drawingwithquartz2d/dq_context/dq_context.html#//apple_ref/doc/uid/TP30001066-CH203-BCIBHHBB
  
  @param cgImage The CGImage
  @param size The scale size in pixel.
@@ -103,23 +150,50 @@ typedef NS_ENUM(NSUInteger, SDImageCoderDecodeSolution) {
  */
 + (CGSize)scaledSizeWithImageSize:(CGSize)imageSize scaleSize:(CGSize)scaleSize preserveAspectRatio:(BOOL)preserveAspectRatio shouldScaleUp:(BOOL)shouldScaleUp;
 
+/// Calculate the limited image size for the given byte budget, when using `SDImageCoderDecodeScaleDownLimitBytes`. This preserves aspect ratio and never scales up
+/// @param imageSize The image size (in pixels or points, as defined by the caller)
+/// @param limitBytes The limit bytes
+/// @param bytesPerPixel The bytes per pixel
+/// @param frameCount The image frame count; 0 is treated as 1 frame
++ (CGSize)scaledSizeWithImageSize:(CGSize)imageSize limitBytes:(NSUInteger)limitBytes bytesPerPixel:(NSUInteger)bytesPerPixel frameCount:(NSUInteger)frameCount;
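A hedged worked example of the math: a single-frame 4000x3000 image decoded as RGBA8888 and capped at 8 MB comes out near 1672x1254, since 8,388,608 / 4 = 2,097,152 pixels and 1672 * 1254 * 4 bytes is just under 8 MB:

    CGSize capped = [SDImageCoderHelper scaledSizeWithImageSize:CGSizeMake(4000, 3000)
                                                     limitBytes:8 * 1024 * 1024
                                                  bytesPerPixel:4
                                                     frameCount:1];
    // capped is roughly {1672, 1254}; the 4:3 aspect ratio is preserved and it never scales up.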
 /**
  Return the decoded image for the provided image. Unlike `CGImageCreateDecoded:`, this one will not decode images that contain an alpha channel or animated images. On iOS 15+, this may use `UIImage.preparingForDisplay()` to use CMPhoto for better performance than the old solution.
  @param image The image to be decoded
+ @note This translates to `decodedImageWithImage:policy:` with the automatic policy
  @return The decoded image
  */
 + (UIImage * _Nullable)decodedImageWithImage:(UIImage * _Nullable)image;
 
+/**
+ Return the decoded image for the provided image. Unlike `CGImageCreateDecoded:`, this one will not decode images that contain an alpha channel or animated images. On iOS 15+, this may use `UIImage.preparingForDisplay()` to use CMPhoto for better performance than the old solution.
+ @param image The image to be decoded
+ @param policy The force-decode policy, which affects the check of whether the input image needs decoding
+ @return The decoded image
+ */
++ (UIImage * _Nullable)decodedImageWithImage:(UIImage * _Nullable)image policy:(SDImageForceDecodePolicy)policy;
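For example, a minimal sketch of opting out of force-decode for one image, e.g. to keep a lazy CGImage purgeable (`image` is a hypothetical UIImage):

    UIImage *untouched = [SDImageCoderHelper decodedImageWithImage:image
                                                            policy:SDImageForceDecodePolicyNever];
    // With the never policy this simply returns the input image unchanged.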
+
 /**
  Return the decoded and probably scaled-down image for the provided image. If the image's pixel byte size is larger than the limit bytes, it will try to scale down; otherwise it just works like `decodedImageWithImage:`. It never scales up.
  @warning You should not pass too small a byte limit; the suggested value is larger than 1MB. Even though we use Tile Decoding to avoid OOM, a small limit consumes much more CPU time because we need to iterate more times to draw each tile.
 
  @param image The image to be decoded and scaled down
  @param bytes The limit bytes size. Provide 0 to use the built-in limit.
+ @note This translates to `decodedAndScaledDownImageWithImage:limitBytes:policy:` with the automatic policy
  @return The decoded and probably scaled down image
  */
 + (UIImage * _Nullable)decodedAndScaledDownImageWithImage:(UIImage * _Nullable)image limitBytes:(NSUInteger)bytes;
 
+/**
+ Return the decoded and probably scaled-down image for the provided image. If the image's pixel byte size is larger than the limit bytes, it will try to scale down; otherwise it just works like `decodedImageWithImage:`. It never scales up.
+ @warning You should not pass too small a byte limit; the suggested value is larger than 1MB. Even though we use Tile Decoding to avoid OOM, a small limit consumes much more CPU time because we need to iterate more times to draw each tile.
+
+ @param image The image to be decoded and scaled down
+ @param bytes The limit bytes size. Provide 0 to use the built-in limit.
+ @param policy The force-decode policy, which affects the check of whether the input image needs decoding
+ @return The decoded and probably scaled down image
+ */
++ (UIImage * _Nullable)decodedAndScaledDownImageWithImage:(UIImage * _Nullable)image limitBytes:(NSUInteger)bytes policy:(SDImageForceDecodePolicy)policy;
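A combined sketch; the 60 MB figure matches the documented default scale-down limit on iOS, and `image` is a hypothetical UIImage:

    UIImage *scaled = [SDImageCoderHelper decodedAndScaledDownImageWithImage:image
                                                                  limitBytes:60 * 1024 * 1024
                                                                      policy:SDImageForceDecodePolicyAutomatic];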
+
 /**
  Control the default force decode solution. Available solutions  in `SDImageCoderDecodeSolution`.
  @note Defaults to `SDImageCoderDecodeSolutionAutomatic`, which prefers to use UIKit for JPEG/HEIF, and fallback on CoreGraphics. If you want control on your hand, set the other solution.

+ 245 - 55
Pods/SDWebImage/SDWebImage/Core/SDImageCoderHelper.m

@@ -17,11 +17,10 @@
 #import "SDInternalMacros.h"
 #import "SDGraphicsImageRenderer.h"
 #import "SDInternalMacros.h"
+#import "SDDeviceHelper.h"
 #import <Accelerate/Accelerate.h>
 
-static inline size_t SDByteAlign(size_t size, size_t alignment) {
-    return ((size + (alignment - 1)) / alignment) * alignment;
-}
+#define kCGColorSpaceDeviceRGB CFSTR("kCGColorSpaceDeviceRGB")
 
 #if SD_UIKIT
 static inline UIImage *SDImageDecodeUIKit(UIImage *image) {
@@ -72,6 +71,42 @@ static inline BOOL SDImageSupportsHardwareHEVCDecoder(void) {
 }
 #endif
 
+static UIImage * _Nonnull SDImageGetAlphaDummyImage(void) {
+    static dispatch_once_t onceToken;
+    static UIImage *dummyImage;
+    dispatch_once(&onceToken, ^{
+        SDGraphicsImageRendererFormat *format = [SDGraphicsImageRendererFormat preferredFormat];
+        format.scale = 1;
+        format.opaque = NO;
+        CGSize size = CGSizeMake(1, 1);
+        SDGraphicsImageRenderer *renderer = [[SDGraphicsImageRenderer alloc] initWithSize:size format:format];
+        dummyImage = [renderer imageWithActions:^(CGContextRef  _Nonnull context) {
+            CGContextSetFillColorWithColor(context, UIColor.redColor.CGColor);
+            CGContextFillRect(context, CGRectMake(0, 0, size.width, size.height));
+        }];
+        NSCAssert(dummyImage, @"The sample alpha image (1x1 pixels) returns nil, OS bug ?");
+    });
+    return dummyImage;
+}
+
+static UIImage * _Nonnull SDImageGetNonAlphaDummyImage(void) {
+    static dispatch_once_t onceToken;
+    static UIImage *dummyImage;
+    dispatch_once(&onceToken, ^{
+        SDGraphicsImageRendererFormat *format = [SDGraphicsImageRendererFormat preferredFormat];
+        format.scale = 1;
+        format.opaque = YES;
+        CGSize size = CGSizeMake(1, 1);
+        SDGraphicsImageRenderer *renderer = [[SDGraphicsImageRenderer alloc] initWithSize:size format:format];
+        dummyImage = [renderer imageWithActions:^(CGContextRef  _Nonnull context) {
+            CGContextSetFillColorWithColor(context, UIColor.redColor.CGColor);
+            CGContextFillRect(context, CGRectMake(0, 0, size.width, size.height));
+        }];
+        NSCAssert(dummyImage, @"The sample non-alpha image (1x1 pixels) returns nil, OS bug ?");
+    });
+    return dummyImage;
+}
+
 static SDImageCoderDecodeSolution kDefaultDecodeSolution = SDImageCoderDecodeSolutionAutomatic;
 
 static const size_t kBytesPerPixel = 4;
@@ -258,11 +293,82 @@ static const CGFloat kDestSeemOverlap = 2.0f;   // the numbers of pixels to over
     static CGColorSpaceRef colorSpace;
     static dispatch_once_t onceToken;
     dispatch_once(&onceToken, ^{
+#if SD_MAC
+        NSScreen *mainScreen = nil;
+        if (@available(macOS 10.12, *)) {
+            mainScreen = [NSScreen mainScreen];
+        } else {
+            mainScreen = [NSScreen screens].firstObject;
+        }
+        colorSpace = mainScreen.colorSpace.CGColorSpace;
+#else
         colorSpace = CGColorSpaceCreateWithName(kCGColorSpaceSRGB);
+#endif
     });
     return colorSpace;
 }
 
++ (SDImagePixelFormat)preferredPixelFormat:(BOOL)containsAlpha {
+    CGImageRef cgImage;
+    if (containsAlpha) {
+        cgImage = SDImageGetAlphaDummyImage().CGImage;
+    } else {
+        cgImage = SDImageGetNonAlphaDummyImage().CGImage;
+    }
+    CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(cgImage);
+    size_t bitsPerPixel = 8;
+    if (SD_OPTIONS_CONTAINS(bitmapInfo, kCGBitmapFloatComponents)) {
+        bitsPerPixel = 16;
+    }
+    size_t components = 4; // Hardcode now
+    // https://github.com/path/FastImageCache#byte-alignment
+    // A properly aligned bytes-per-row value must be a multiple of 8 pixels × bytes per pixel.
+    size_t alignment = (bitsPerPixel / 8) * components * 8;
+    SDImagePixelFormat pixelFormat = {
+        .bitmapInfo = bitmapInfo,
+        .alignment = alignment
+    };
+    return pixelFormat;
+}
+
++ (BOOL)CGImageIsHardwareSupported:(CGImageRef)cgImage {
+    BOOL supported = YES;
+    // 1. Check byte alignment
+    size_t bytesPerRow = CGImageGetBytesPerRow(cgImage);
+    BOOL hasAlpha = [self CGImageContainsAlpha:cgImage];
+    SDImagePixelFormat pixelFormat = [self preferredPixelFormat:hasAlpha];
+    if (SDByteAlign(bytesPerRow, pixelFormat.alignment) == bytesPerRow) {
+        // byte aligned, OK
+        supported &= YES;
+    } else {
+        // not aligned
+        supported &= NO;
+    }
+    if (!supported) return supported;
+    
+    // 2. Check color space
+    CGColorSpaceRef colorSpace = CGImageGetColorSpace(cgImage);
+    CGColorSpaceRef perferredColorSpace = [self colorSpaceGetDeviceRGB];
+    if (colorSpace == perferredColorSpace) {
+        return supported;
+    } else {
+        if (@available(iOS 10.0, tvOS 10.0, macOS 10.6, watchOS 3.0, *)) {
+            NSString *colorspaceName = (__bridge_transfer NSString *)CGColorSpaceCopyName(colorSpace);
+            // Seems sRGB/deviceRGB always supported, P3 not always
+            if ([colorspaceName isEqualToString:(__bridge NSString *)kCGColorSpaceDeviceRGB]
+                || [colorspaceName isEqualToString:(__bridge NSString *)kCGColorSpaceSRGB]) {
+                supported &= YES;
+            } else {
+                supported &= NO;
+            }
+            return supported;
+        } else {
+            // Fallback on earlier versions
+            return supported;
+        }
+    }
+}
+
 + (BOOL)CGImageContainsAlpha:(CGImageRef)cgImage {
     if (!cgImage) {
         return NO;
@@ -307,16 +413,8 @@ static const CGFloat kDestSeemOverlap = 2.0f;   // the numbers of pixels to over
     BOOL hasAlpha = [self CGImageContainsAlpha:cgImage];
     // kCGImageAlphaNone is not supported in CGBitmapContextCreate.
     // Check #3330 for more detail about why this bitmap is chosen.
-    CGBitmapInfo bitmapInfo;
-    if (hasAlpha) {
-        // iPhone GPU prefer to use BGRA8888, see: https://forums.raywenderlich.com/t/why-mtlpixelformat-bgra8unorm/53489
-        // BGRA8888
-        bitmapInfo = kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst;
-    } else {
-        // BGR888 previously works on iOS 8~iOS 14, however, iOS 15+ will result a black image. FB9958017
-        // RGB888
-        bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaNoneSkipLast;
-    }
+    // From v5.17.0, use runtime detection of the bitmap info instead of hardcoding it.
+    CGBitmapInfo bitmapInfo = [SDImageCoderHelper preferredPixelFormat:hasAlpha].bitmapInfo;
     CGContextRef context = CGBitmapContextCreate(NULL, newWidth, newHeight, 8, 0, [self colorSpaceGetDeviceRGB], bitmapInfo);
     if (!context) {
         return NULL;
@@ -336,53 +434,110 @@ static const CGFloat kDestSeemOverlap = 2.0f;   // the numbers of pixels to over
     if (!cgImage) {
         return NULL;
     }
+    if (size.width == 0 || size.height == 0) {
+        return NULL;
+    }
     size_t width = CGImageGetWidth(cgImage);
     size_t height = CGImageGetHeight(cgImage);
     if (width == size.width && height == size.height) {
+        // Already same size
         CGImageRetain(cgImage);
         return cgImage;
     }
+    size_t bitsPerComponent = CGImageGetBitsPerComponent(cgImage);
+    if (bitsPerComponent != 8 && bitsPerComponent != 16 && bitsPerComponent != 32) {
+        // Unsupported
+        return NULL;
+    }
+    size_t bitsPerPixel = CGImageGetBitsPerPixel(cgImage);
+    CGColorSpaceRef colorSpace = CGImageGetColorSpace(cgImage);
+    CGColorRenderingIntent renderingIntent = CGImageGetRenderingIntent(cgImage);
+    CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(cgImage);
+    CGImageAlphaInfo alphaInfo = bitmapInfo & kCGBitmapAlphaInfoMask;
+    CGImageByteOrderInfo byteOrderInfo = bitmapInfo & kCGBitmapByteOrderMask;
+    CGBitmapInfo alphaBitmapInfo = (uint32_t)byteOrderInfo;
     
+    // Input need to convert with alpha
+    if (alphaInfo == kCGImageAlphaNone) {
+        // Convert RGB8/16/F -> ARGB8/16/F
+        alphaBitmapInfo |= kCGImageAlphaFirst;
+    } else {
+        alphaBitmapInfo |= alphaInfo;
+    }
+    uint32_t components;
+    if (alphaInfo == kCGImageAlphaOnly) {
+        // Alpha only, simply 1 channel
+        components = 1;
+    } else {
+        components = 4;
+    }
+    if (SD_OPTIONS_CONTAINS(bitmapInfo, kCGBitmapFloatComponents)) {
+        // Keep float components
+        alphaBitmapInfo |= kCGBitmapFloatComponents;
+    }
     __block vImage_Buffer input_buffer = {}, output_buffer = {};
     @onExit {
         if (input_buffer.data) free(input_buffer.data);
         if (output_buffer.data) free(output_buffer.data);
     };
-    BOOL hasAlpha = [self CGImageContainsAlpha:cgImage];
-    // kCGImageAlphaNone is not supported in CGBitmapContextCreate.
-    // Check #3330 for more detail about why this bitmap is choosen.
-    CGBitmapInfo bitmapInfo;
-    if (hasAlpha) {
-        // iPhone GPU prefer to use BGRA8888, see: https://forums.raywenderlich.com/t/why-mtlpixelformat-bgra8unorm/53489
-        // BGRA8888
-        bitmapInfo = kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst;
-    } else {
-        // BGR888 previously works on iOS 8~iOS 14, however, iOS 15+ will result a black image. FB9958017
-        // RGB888
-        bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaNoneSkipLast;
-    }
+    // Always provide alpha channel
     vImage_CGImageFormat format = (vImage_CGImageFormat) {
-        .bitsPerComponent = 8,
-        .bitsPerPixel = 32,
-        .colorSpace = NULL,
-        .bitmapInfo = bitmapInfo,
+        .bitsPerComponent = (uint32_t)bitsPerComponent,
+        .bitsPerPixel = (uint32_t)bitsPerComponent * components,
+        .colorSpace = colorSpace,
+        .bitmapInfo = alphaBitmapInfo,
         .version = 0,
         .decode = NULL,
-        .renderingIntent = CGImageGetRenderingIntent(cgImage)
+        .renderingIntent = renderingIntent
     };
-    
-    vImage_Error a_ret = vImageBuffer_InitWithCGImage(&input_buffer, &format, NULL, cgImage, kvImageNoFlags);
-    if (a_ret != kvImageNoError) return NULL;
-    output_buffer.width = MAX(size.width, 0);
-    output_buffer.height = MAX(size.height, 0);
-    output_buffer.rowBytes = SDByteAlign(output_buffer.width * 4, 64);
-    output_buffer.data = malloc(output_buffer.rowBytes * output_buffer.height);
+    // input
+    vImage_Error ret = vImageBuffer_InitWithCGImage(&input_buffer, &format, NULL, cgImage, kvImageNoFlags);
+    if (ret != kvImageNoError) return NULL;
+    // output
+    vImageBuffer_Init(&output_buffer, size.height, size.width, (uint32_t)bitsPerComponent * components, kvImageNoFlags);
     if (!output_buffer.data) return NULL;
     
-    vImage_Error ret = vImageScale_ARGB8888(&input_buffer, &output_buffer, NULL, kvImageHighQualityResampling);
+    if (components == 4) {
+        if (bitsPerComponent == 32) {
+            ret = vImageScale_ARGBFFFF(&input_buffer, &output_buffer, NULL, kvImageHighQualityResampling);
+        } else if (bitsPerComponent == 16) {
+            ret = vImageScale_ARGB16U(&input_buffer, &output_buffer, NULL, kvImageHighQualityResampling);
+        } else if (bitsPerComponent == 8) {
+            ret = vImageScale_ARGB8888(&input_buffer, &output_buffer, NULL, kvImageHighQualityResampling);
+        }
+    } else {
+        if (bitsPerComponent == 32) {
+            ret = vImageScale_PlanarF(&input_buffer, &output_buffer, NULL, kvImageHighQualityResampling);
+        } else if (bitsPerComponent == 16) {
+            ret = vImageScale_Planar16U(&input_buffer, &output_buffer, NULL, kvImageHighQualityResampling);
+        } else if (bitsPerComponent == 8) {
+            ret = vImageScale_Planar8(&input_buffer, &output_buffer, NULL, kvImageHighQualityResampling);
+        }
+    }
     if (ret != kvImageNoError) return NULL;
     
-    CGImageRef outputImage = vImageCreateCGImageFromBuffer(&output_buffer, &format, NULL, NULL, kvImageNoFlags, &ret);
+    // Convert back to non-alpha for RGB input to preserve pixel format
+    if (alphaInfo == kCGImageAlphaNone) {
+        // in-place, no extra allocation
+        if (bitsPerComponent == 32) {
+            ret = vImageConvert_ARGBFFFFtoRGBFFF(&output_buffer, &output_buffer, kvImageNoFlags);
+        } else if (bitsPerComponent == 16) {
+            ret = vImageConvert_ARGB16UtoRGB16U(&output_buffer, &output_buffer, kvImageNoFlags);
+        } else if (bitsPerComponent == 8) {
+            ret = vImageConvert_ARGB8888toRGB888(&output_buffer, &output_buffer, kvImageNoFlags);
+        }
+        if (ret != kvImageNoError) return NULL;
+    }
+    vImage_CGImageFormat output_format = (vImage_CGImageFormat) {
+        .bitsPerComponent = (uint32_t)bitsPerComponent,
+        .bitsPerPixel = (uint32_t)bitsPerPixel,
+        .colorSpace = colorSpace,
+        .bitmapInfo = bitmapInfo,
+        .version = 0,
+        .decode = NULL,
+        .renderingIntent = renderingIntent
+    };
+    CGImageRef outputImage = vImageCreateCGImageFromBuffer(&output_buffer, &output_format, NULL, NULL, kvImageNoFlags, &ret);
     if (ret != kvImageNoError) {
         CGImageRelease(outputImage);
         return NULL;
@@ -428,8 +583,25 @@ static const CGFloat kDestSeemOverlap = 2.0f;   // the numbers of pixels to over
     return CGSizeMake(resultWidth, resultHeight);
 }
 
++ (CGSize)scaledSizeWithImageSize:(CGSize)imageSize limitBytes:(NSUInteger)limitBytes bytesPerPixel:(NSUInteger)bytesPerPixel frameCount:(NSUInteger)frameCount {
+    if (CGSizeEqualToSize(imageSize, CGSizeZero)) return CGSizeMake(1, 1);
+    NSUInteger totalFramePixelSize = limitBytes / bytesPerPixel / (frameCount ?: 1);
+    CGFloat ratio = imageSize.height / imageSize.width;
+    CGFloat width = sqrt(totalFramePixelSize / ratio);
+    CGFloat height = width * ratio;
+    width = MAX(1, floor(width));
+    height = MAX(1, floor(height));
+    CGSize size = CGSizeMake(width, height);
+    
+    return size;
+}
+
 + (UIImage *)decodedImageWithImage:(UIImage *)image {
-    if (![self shouldDecodeImage:image]) {
+    return [self decodedImageWithImage:image policy:SDImageForceDecodePolicyAutomatic];
+}
+
++ (UIImage *)decodedImageWithImage:(UIImage *)image policy:(SDImageForceDecodePolicy)policy {
+    if (![self shouldDecodeImage:image policy:policy]) {
         return image;
     }
     
@@ -486,7 +658,11 @@ static const CGFloat kDestSeemOverlap = 2.0f;   // the numbers of pixels to over
 }
 
 + (UIImage *)decodedAndScaledDownImageWithImage:(UIImage *)image limitBytes:(NSUInteger)bytes {
-    if (![self shouldDecodeImage:image]) {
+    return [self decodedAndScaledDownImageWithImage:image limitBytes:bytes policy:SDImageForceDecodePolicyAutomatic];
+}
+
++ (UIImage *)decodedAndScaledDownImageWithImage:(UIImage *)image limitBytes:(NSUInteger)bytes policy:(SDImageForceDecodePolicy)policy {
+    if (![self shouldDecodeImage:image policy:policy]) {
         return image;
     }
     
@@ -550,16 +726,8 @@ static const CGFloat kDestSeemOverlap = 2.0f;   // the numbers of pixels to over
         
         // kCGImageAlphaNone is not supported in CGBitmapContextCreate.
         // Check #3330 for more detail about why this bitmap is chosen.
-        CGBitmapInfo bitmapInfo;
-        if (hasAlpha) {
-            // iPhone GPU prefer to use BGRA8888, see: https://forums.raywenderlich.com/t/why-mtlpixelformat-bgra8unorm/53489
-            // BGRA8888
-            bitmapInfo = kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst;
-        } else {
-            // BGR888 previously works on iOS 8~iOS 14, however, iOS 15+ will result a black image. FB9958017
-            // RGB888
-            bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaNoneSkipLast;
-        }
+        // From v5.17.0, use runtime detection of the bitmap info instead of hardcoding it.
+        CGBitmapInfo bitmapInfo = [SDImageCoderHelper preferredPixelFormat:hasAlpha].bitmapInfo;
         CGContextRef destContext = CGBitmapContextCreate(NULL,
                                                          destResolution.width,
                                                          destResolution.height,
@@ -618,7 +786,7 @@ static const CGFloat kDestSeemOverlap = 2.0f;   // the numbers of pixels to over
             sourceTileImageRef = CGImageCreateWithImageInRect( sourceImageRef, sourceTile );
             if( y == iterations - 1 && remainder ) {
                 float dify = destTile.size.height;
-                destTile.size.height = CGImageGetHeight( sourceTileImageRef ) * imageScale;
+                destTile.size.height = CGImageGetHeight( sourceTileImageRef ) * imageScale + kDestSeemOverlap;
                 dify -= destTile.size.height;
                 destTile.origin.y = MIN(0, destTile.origin.y + dify);
             }
@@ -733,11 +901,15 @@ static const CGFloat kDestSeemOverlap = 2.0f;   // the numbers of pixels to over
 #endif
 
 #pragma mark - Helper Function
-+ (BOOL)shouldDecodeImage:(nullable UIImage *)image {
++ (BOOL)shouldDecodeImage:(nullable UIImage *)image policy:(SDImageForceDecodePolicy)policy {
     // Prevent "CGBitmapContextCreateImage: invalid context 0x0" error
     if (image == nil) {
         return NO;
     }
+    // Check policy (never)
+    if (policy == SDImageForceDecodePolicyNever) {
+        return NO;
+    }
     // Avoid extra decode
     if (image.sd_isDecoded) {
         return NO;
@@ -750,7 +922,25 @@ static const CGFloat kDestSeemOverlap = 2.0f;   // the numbers of pixels to over
     if (image.sd_isVector) {
         return NO;
     }
-    
+    // Check policy (always)
+    if (policy == SDImageForceDecodePolicyAlways) {
+        return YES;
+    } else {
+        // Check policy (automatic)
+        CGImageRef cgImage = image.CGImage;
+        if (cgImage) {
+            CFStringRef uttype = CGImageGetUTType(cgImage);
+            if (uttype) {
+                // Only ImageIO can set `com.apple.ImageIO.imageSourceTypeIdentifier`
+                return YES;
+            } else {
+                // Now, let's check if the CGImage is hardware supported (not being byte-aligned will cause an extra copy)
+                BOOL isSupported = [SDImageCoderHelper CGImageIsHardwareSupported:cgImage];
+                return !isSupported;
+            }
+        }
+    }
+
     return YES;
 }
 

+ 4 - 5
Pods/SDWebImage/SDWebImage/Core/SDImageGraphics.m

@@ -32,14 +32,13 @@ static CGContextRef SDCGContextCreateBitmapContext(CGSize size, BOOL opaque, CGF
     CGColorSpaceRef space = [SDImageCoderHelper colorSpaceGetDeviceRGB];
     // kCGImageAlphaNone is not supported in CGBitmapContextCreate.
     // Check #3330 for more detail about why this bitmap is chosen.
+    // From v5.17.0, use runtime detection of the bitmap info instead of hardcoding it.
+    // However, macOS's runtime detection would also call this function, causing recursion, so we still hardcode here
     CGBitmapInfo bitmapInfo;
     if (!opaque) {
-        // iPhone GPU prefer to use BGRA8888, see: https://forums.raywenderlich.com/t/why-mtlpixelformat-bgra8unorm/53489
-        // BGRA8888
-        bitmapInfo = kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst;
+        // [NSImage imageWithSize:flipped:drawingHandler:] returns 16-bit float RGBA on alpha images, which we don't need
+        bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedLast;
     } else {
-        // BGR888 previously works on iOS 8~iOS 14, however, iOS 15+ will result a black image. FB9958017
-        // RGB888
         bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaNoneSkipLast;
     }
     CGContextRef context = CGBitmapContextCreate(NULL, width, height, 8, 0, space, bitmapInfo);

+ 64 - 6
Pods/SDWebImage/SDWebImage/Core/SDImageIOAnimatedCoder.m

@@ -27,6 +27,8 @@ static CGImageSourceRef (*SDCGImageGetImageSource)(CGImageRef);
 
 // Specify File Size for lossy format encoding, like JPEG
 static NSString * kSDCGImageDestinationRequestedFileSize = @"kCGImageDestinationRequestedFileSize";
+// Avoid ImageIO translating JFIF orientation to EXIF orientation, which causes a bug because the returned CGImage already applies the orientation transform
+static NSString * kSDCGImageSourceSkipMetadata = @"kCGImageSourceSkipMetadata";
 
 // This strip the un-wanted CGImageProperty, like the internal CGImageSourceRef in iOS 15+
 // However, CGImageCreateCopy still keep those CGImageProperty, not suit for our use case
@@ -70,6 +72,7 @@ static CGImageRef __nullable SDCGImageCreateCopy(CGImageRef cg_nullable image) {
     BOOL _finished;
     BOOL _preserveAspectRatio;
     CGSize _thumbnailSize;
+    NSUInteger _limitBytes;
     BOOL _lazyDecode;
 }
 
@@ -231,7 +234,7 @@ static CGImageRef __nullable SDCGImageCreateCopy(CGImageRef cg_nullable image) {
         }
     }
     // Parse the image properties
-    NSDictionary *properties = (__bridge_transfer NSDictionary *)CGImageSourceCopyPropertiesAtIndex(source, index, NULL);
+    NSDictionary *properties = (__bridge_transfer NSDictionary *)CGImageSourceCopyPropertiesAtIndex(source, index, (__bridge CFDictionaryRef)@{kSDCGImageSourceSkipMetadata : @(YES)});
     CGFloat pixelWidth = [properties[(__bridge NSString *)kCGImagePropertyPixelWidth] doubleValue];
     CGFloat pixelHeight = [properties[(__bridge NSString *)kCGImagePropertyPixelHeight] doubleValue];
     CGImagePropertyOrientation exifOrientation = (CGImagePropertyOrientation)[properties[(__bridge NSString *)kCGImagePropertyOrientation] unsignedIntegerValue];
@@ -370,10 +373,16 @@ static CGImageRef __nullable SDCGImageCreateCopy(CGImageRef cg_nullable image) {
         lazyDecode = lazyDecodeValue.boolValue;
     }
     
+    NSUInteger limitBytes = 0;
+    NSNumber *limitBytesValue = options[SDImageCoderDecodeScaleDownLimitBytes];
+    if (limitBytesValue != nil) {
+        limitBytes = limitBytesValue.unsignedIntegerValue;
+    }
+    
 #if SD_MAC
     // If don't use thumbnail, prefers the built-in generation of frames (GIF/APNG)
     // Which decode frames in time and reduce memory usage
-    if (thumbnailSize.width == 0 || thumbnailSize.height == 0) {
+    if (limitBytes == 0 && (thumbnailSize.width == 0 || thumbnailSize.height == 0)) {
         SDAnimatedImageRep *imageRep = [[SDAnimatedImageRep alloc] initWithData:data];
         if (imageRep) {
             NSSize size = NSMakeSize(imageRep.pixelsWide / scale, imageRep.pixelsHigh / scale);
@@ -415,16 +424,30 @@ static CGImageRef __nullable SDCGImageCreateCopy(CGImageRef cg_nullable image) {
         return nil;
     }
     
-    size_t count = CGImageSourceGetCount(source);
+    size_t frameCount = CGImageSourceGetCount(source);
     UIImage *animatedImage;
     
+    // Parse the image properties
+    NSDictionary *properties = (__bridge_transfer NSDictionary *)CGImageSourceCopyPropertiesAtIndex(source, 0, NULL);
+    size_t width = [properties[(__bridge NSString *)kCGImagePropertyPixelWidth] doubleValue];
+    size_t height = [properties[(__bridge NSString *)kCGImagePropertyPixelHeight] doubleValue];
+    // Scale down to limit bytes if needed
+    if (limitBytes > 0) {
+        // Hack, since the public ImageIO API (not CGImageDecompressor/CMPhoto) always returns an RGBA8888 CGImage
+        CGSize imageSize = CGSizeMake(width, height);
+        CGSize framePixelSize = [SDImageCoderHelper scaledSizeWithImageSize:imageSize limitBytes:limitBytes bytesPerPixel:4 frameCount:frameCount];
+        // Override thumbnail size
+        thumbnailSize = framePixelSize;
+        preserveAspectRatio = YES;
+    }
+    
     BOOL decodeFirstFrame = [options[SDImageCoderDecodeFirstFrameOnly] boolValue];
-    if (decodeFirstFrame || count <= 1) {
+    if (decodeFirstFrame || frameCount <= 1) {
         animatedImage = [self.class createFrameAtIndex:0 source:source scale:scale preserveAspectRatio:preserveAspectRatio thumbnailSize:thumbnailSize lazyDecode:lazyDecode animatedImage:NO];
     } else {
-        NSMutableArray<SDImageFrame *> *frames = [NSMutableArray arrayWithCapacity:count];
+        NSMutableArray<SDImageFrame *> *frames = [NSMutableArray arrayWithCapacity:frameCount];
         
-        for (size_t i = 0; i < count; i++) {
+        for (size_t i = 0; i < frameCount; i++) {
             UIImage *image = [self.class createFrameAtIndex:i source:source scale:scale preserveAspectRatio:preserveAspectRatio thumbnailSize:thumbnailSize lazyDecode:lazyDecode animatedImage:NO];
             if (!image) {
                 continue;
@@ -481,6 +504,12 @@ static CGImageRef __nullable SDCGImageCreateCopy(CGImageRef cg_nullable image) {
             preserveAspectRatio = preserveAspectRatioValue.boolValue;
         }
         _preserveAspectRatio = preserveAspectRatio;
+        NSUInteger limitBytes = 0;
+        NSNumber *limitBytesValue = options[SDImageCoderDecodeScaleDownLimitBytes];
+        if (limitBytesValue != nil) {
+            limitBytes = limitBytesValue.unsignedIntegerValue;
+        }
+        _limitBytes = limitBytes;
         BOOL lazyDecode = NO; // Defaults NO for animated image coder
         NSNumber *lazyDecodeValue = options[SDImageCoderDecodeUseLazyDecoding];
         if (lazyDecodeValue != nil) {
@@ -524,6 +553,16 @@ static CGImageRef __nullable SDCGImageCreateCopy(CGImageRef cg_nullable image) {
     // For animated image progressive decoding because the frame count and duration may be changed.
     [self scanAndCheckFramesValidWithImageSource:_imageSource];
     SD_UNLOCK(_lock);
+    
+    // Scale down to limit bytes if needed
+    if (_limitBytes > 0) {
+        // Hack, since the public ImageIO API (not CGImageDecompressor/CMPhoto) always returns an RGBA8888 CGImage
+        CGSize imageSize = CGSizeMake(_width, _height);
+        CGSize framePixelSize = [SDImageCoderHelper scaledSizeWithImageSize:imageSize limitBytes:_limitBytes bytesPerPixel:4 frameCount:_frameCount];
+        // Override thumbnail size
+        _thumbnailSize = framePixelSize;
+        _preserveAspectRatio = YES;
+    }
 }
 
 - (UIImage *)incrementalDecodedImageWithOptions:(SDImageCoderOptions *)options {
@@ -710,6 +749,25 @@ static CGImageRef __nullable SDCGImageCreateCopy(CGImageRef cg_nullable image) {
             preserveAspectRatio = preserveAspectRatioValue.boolValue;
         }
         _preserveAspectRatio = preserveAspectRatio;
+        NSUInteger limitBytes = 0;
+        NSNumber *limitBytesValue = options[SDImageCoderDecodeScaleDownLimitBytes];
+        if (limitBytesValue != nil) {
+            limitBytes = limitBytesValue.unsignedIntegerValue;
+        }
+        _limitBytes = limitBytes;
+        // Parse the image properties
+        NSDictionary *properties = (__bridge_transfer NSDictionary *)CGImageSourceCopyPropertiesAtIndex(imageSource, 0, NULL);
+        _width = [properties[(__bridge NSString *)kCGImagePropertyPixelWidth] doubleValue];
+        _height = [properties[(__bridge NSString *)kCGImagePropertyPixelHeight] doubleValue];
+        // Scale down to limit bytes if needed
+        if (_limitBytes > 0) {
+            // Hack, since the public ImageIO API (not CGImageDecompressor/CMPhoto) always returns an RGBA8888 CGImage
+            CGSize imageSize = CGSizeMake(_width, _height);
+            CGSize framePixelSize = [SDImageCoderHelper scaledSizeWithImageSize:imageSize limitBytes:_limitBytes bytesPerPixel:4 frameCount:_frameCount];
+            // Override thumbnail size
+            _thumbnailSize = framePixelSize;
+            _preserveAspectRatio = YES;
+        }
         BOOL lazyDecode = NO; // Defaults NO for animated image coder
         NSNumber *lazyDecodeValue = options[SDImageCoderDecodeUseLazyDecoding];
         if (lazyDecodeValue != nil) {

+ 22 - 14
Pods/SDWebImage/SDWebImage/Core/SDImageLoader.m

@@ -74,15 +74,19 @@ UIImage * _Nullable SDImageLoaderDecodeImageData(NSData * _Nonnull imageData, NS
         image = [imageCoder decodedImageWithData:imageData options:coderOptions];
     }
     if (image) {
-        BOOL shouldDecode = !SD_OPTIONS_CONTAINS(options, SDWebImageAvoidDecodeImage);
-        BOOL lazyDecode = [coderOptions[SDImageCoderDecodeUseLazyDecoding] boolValue];
-        if (lazyDecode) {
-            // lazyDecode = NO means we should not forceDecode, highest priority
-            shouldDecode = NO;
+        SDImageForceDecodePolicy policy = SDImageForceDecodePolicyAutomatic;
+        NSNumber *polivyValue = context[SDWebImageContextImageForceDecodePolicy];
+        if (polivyValue != nil) {
+            policy = polivyValue.unsignedIntegerValue;
         }
-        if (shouldDecode) {
-            image = [SDImageCoderHelper decodedImageWithImage:image];
+        // TODO: Deprecated, remove in SD 6.0...
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wdeprecated-declarations"
+        if (SD_OPTIONS_CONTAINS(options, SDWebImageAvoidDecodeImage)) {
+            policy = SDImageForceDecodePolicyNever;
         }
+#pragma clang diagnostic pop
+        image = [SDImageCoderHelper decodedImageWithImage:image policy:policy];
         // assign the decode options, to let manager check whether to re-decode if needed
         image.sd_decodeOptions = coderOptions;
     }
@@ -151,15 +155,19 @@ UIImage * _Nullable SDImageLoaderDecodeProgressiveImageData(NSData * _Nonnull im
         image = [progressiveCoder incrementalDecodedImageWithOptions:coderOptions];
     }
     if (image) {
-        BOOL shouldDecode = !SD_OPTIONS_CONTAINS(options, SDWebImageAvoidDecodeImage);
-        BOOL lazyDecode = [coderOptions[SDImageCoderDecodeUseLazyDecoding] boolValue];
-        if (lazyDecode) {
-            // lazyDecode = NO means we should not forceDecode, highest priority
-            shouldDecode = NO;
+        SDImageForceDecodePolicy policy = SDImageForceDecodePolicyAutomatic;
+        NSNumber *polivyValue = context[SDWebImageContextImageForceDecodePolicy];
+        if (polivyValue != nil) {
+            policy = polivyValue.unsignedIntegerValue;
         }
-        if (shouldDecode) {
-            image = [SDImageCoderHelper decodedImageWithImage:image];
+        // TODO: Deprecated, remove in SD 6.0...
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wdeprecated-declarations"
+        if (SD_OPTIONS_CONTAINS(options, SDWebImageAvoidDecodeImage)) {
+            policy = SDImageForceDecodePolicyNever;
         }
+#pragma clang diagnostic pop
+        image = [SDImageCoderHelper decodedImageWithImage:image policy:policy];
         // assign the decode options, to let manager check whether to re-decode if needed
         image.sd_decodeOptions = coderOptions;
         // mark the image as progressive (completed one are not mark as progressive)

+ 6 - 0
Pods/SDWebImage/SDWebImage/Core/SDWebImageCompat.h

@@ -47,6 +47,12 @@
     #define SD_WATCH 0
 #endif
 
+// Supports Xcode 14 to suppress warning
+#ifdef TARGET_OS_VISION
+#if TARGET_OS_VISION
+    #define SD_VISION 1
+#endif
+#endif
 
 #if SD_MAC
     #import <AppKit/AppKit.h>

+ 40 - 5
Pods/SDWebImage/SDWebImage/Core/SDWebImageDefine.h

@@ -107,6 +107,7 @@ typedef NS_OPTIONS(NSUInteger, SDWebImageOptions) {
      * of the placeholder image until after the image has finished loading.
      * @note This is used to treate placeholder as an **Error Placeholder** but not **Loading Placeholder** by defaults. if the image loading is cancelled or error, the placeholder will be always set.
      * @note Therefore, if you want both **Error Placeholder** and **Loading Placeholder** exist, use `SDWebImageAvoidAutoSetImage` to manually set the two placeholders and final loaded image by your hand depends on loading result.
+     * @note This option is a UI-level option and has no effect on ImageManager or other components.
      */
     SDWebImageDelayPlaceholder = 1 << 8,
     
@@ -120,6 +121,7 @@ typedef NS_OPTIONS(NSUInteger, SDWebImageOptions) {
      * By default, image is added to the imageView after download. But in some cases, we want to
      * have the hand before setting the image (apply a filter or add it with cross-fade animation for instance)
      * Use this flag if you want to manually set the image in the completion when success
+     * @note This option is a UI-level option and has no effect on ImageManager or other components.
      */
     SDWebImageAvoidAutoSetImage = 1 << 10,
     
@@ -127,9 +129,10 @@ typedef NS_OPTIONS(NSUInteger, SDWebImageOptions) {
      * By default, images are decoded respecting their original size.
      * This flag will scale down the images to a size compatible with the constrained memory of devices.
      * To control the limit memory bytes, check `SDImageCoderHelper.defaultScaleDownLimitBytes` (Defaults to 60MB on iOS)
-     * This will actually translate to use context option `.imageThumbnailPixelSize` from v5.5.0 (Defaults to (3966, 3966) on iOS). Previously does not.
-     * This flags effect the progressive and animated images as well from v5.5.0. Previously does not.
-     * @note If you need detail controls, it's better to use context option `imageThumbnailPixelSize` and `imagePreserveAspectRatio` instead.
+     * (from 5.16.0) This actually translates to using the context option `SDWebImageContextImageScaleDownLimitBytes`, which checks and calculates a thumbnail pixel size whose decoded bytes stay smaller than the limit bytes (including animated images)
+     * (from 5.5.0) This flag affects progressive and animated images as well
+     * @note If you need more detailed control, it's better to use the context option `imageScaleDownBytes` instead.
+     * @warning This does not affect the cache key, which means it will affect the global cache even the next time you query without this option. Pay attention when you use this in global options (it's always recommended to use request-level options for different pipelines)
      */
     SDWebImageScaleDownLargeImages = 1 << 11,
     
@@ -164,6 +167,7 @@ typedef NS_OPTIONS(NSUInteger, SDWebImageOptions) {
     /**
      * By default, when you use `SDWebImageTransition` to do some view transition after the image load finished, this transition is only applied for image when the callback from manager is asynchronous (from network, or disk cache query)
      * This mask can force to apply view transition for any cases, like memory cache query, or sync disk cache query.
+     * @note This option is a UI-level option and has no effect on ImageManager or other components.
      */
     SDWebImageForceTransition = 1 << 17,
     
@@ -171,8 +175,9 @@ typedef NS_OPTIONS(NSUInteger, SDWebImageOptions) {
      * By default, we will decode the image in the background during cache query and download from the network. This can help to improve performance because when rendering image on the screen, it need to be firstly decoded. But this happen on the main queue by Core Animation.
      * However, this process may increase the memory usage as well. If you are experiencing an issue due to excessive memory consumption, This flag can prevent decode the image.
      * @note 5.14.0 introduce `SDImageCoderDecodeUseLazyDecoding`, use that for better control from codec, instead of post-processing. Which acts the similar like this option but works for SDAnimatedImage as well (this one does not)
+     * @deprecated Deprecated in v5.17.0. If you don't want force-decode, pass [.imageForceDecodePolicy] = [SDImageForceDecodePolicy.never] in the context options
      */
-    SDWebImageAvoidDecodeImage = 1 << 18,
+    SDWebImageAvoidDecodeImage API_DEPRECATED("Use SDWebImageContextImageForceDecodePolicy instead", macos(10.10, 10.10), ios(8.0, 8.0), tvos(9.0, 9.0), watchos(2.0, 2.0)) = 1 << 18,
     
     /**
      * By default, we decode the animated image. This flag can force decode the first frame only and produce the static image.
@@ -204,7 +209,15 @@ typedef NS_OPTIONS(NSUInteger, SDWebImageOptions) {
      * We usually don't apply transform on vector images, because vector images supports dynamically changing to any size, rasterize to a fixed size will loss details. To modify vector images, you can process the vector data at runtime (such as modifying PDF tag / SVG element).
      * Use this flag to transform them anyway.
      */
-    SDWebImageTransformVectorImage = 1 << 23
+    SDWebImageTransformVectorImage = 1 << 23,
+    
+    /**
+     * By default, when you use a UI-level category like `sd_setImageWithURL:` on UIImageView, it cancels the in-flight image request for that view.
+     * However, some users may choose not to cancel the in-flight request and always start a new pipeline.
+     * Use this flag to disable the automatic cancel behavior (see the usage sketch after this enum).
+     * @note This option is a UI-level option and has no effect on ImageManager or other components.
+     */
+    SDWebImageAvoidAutoCancelImage = 1 << 24,
 };
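A usage sketch of the new flag, assuming `<SDWebImage/SDWebImage.h>` is imported and `imageView` / `url` are placeholders:

    // Start a new load without cancelling the request already running on this view.
    [imageView sd_setImageWithURL:url
                 placeholderImage:nil
                          options:SDWebImageAvoidAutoCancelImage];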
 
 
@@ -255,6 +268,15 @@ FOUNDATION_EXPORT SDWebImageContextOption _Nonnull const SDWebImageContextImageC
  */
 FOUNDATION_EXPORT SDWebImageContextOption _Nonnull const SDWebImageContextImageTransformer;
 
+#pragma mark - Force Decode Options
+
+/**
+ An NSNumber instance which stores the `SDImageForceDecodePolicy` enum. This is used to control how the current image load should force-decode the decoded image (CGImage, typically). See what force-decode means in the `SDImageForceDecodePolicy` comment.
+ Defaults to `SDImageForceDecodePolicyAutomatic`, which detects the input CGImage's metadata and only force-decodes if the input CGImage cannot be rendered directly on screen (which would need an extra Core Animation copied image and increase RAM usage).
+ @note If you always want to force-decode for this image request, pass `SDImageForceDecodePolicyAlways`, for example for WebP images which were not created by ImageIO.
+ */
+FOUNDATION_EXPORT SDWebImageContextOption _Nonnull const SDWebImageContextImageForceDecodePolicy;
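A request-level sketch of passing the policy through the context (`imageView` and `url` are placeholders):

    // Disable force-decode for this request only, instead of using the deprecated SDWebImageAvoidDecodeImage option.
    [imageView sd_setImageWithURL:url
                 placeholderImage:nil
                          options:0
                          context:@{SDWebImageContextImageForceDecodePolicy : @(SDImageForceDecodePolicyNever)}];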
+
 #pragma mark - Image Decoder Context Options
 
 /**
@@ -293,6 +315,19 @@ FOUNDATION_EXPORT SDWebImageContextOption _Nonnull const SDWebImageContextImageT
  */
 FOUNDATION_EXPORT SDWebImageContextOption _Nonnull const SDWebImageContextImageTypeIdentifierHint;
 
+/**
+ An NSUInteger value providing the limit bytes during decoding. This can help avoid OOM on animated images with large frame counts, or static images with large pixel sizes, when you don't know how much RAM they will occupy before decoding
+ The decoder applies the following logic based on the limit bytes (a usage sketch follows this declaration):
+ 1. Get the total frame count (a static image counts as 1)
+ 2. Calculate the `framePixelSize` width/height from `sqrt(limitBytes / frameCount / bytesPerPixel)`, keeping aspect ratio (at least 1x1)
+ 3. If `framePixelSize < originalImagePixelSize`, do thumbnail decoding (see `SDImageCoderDecodeThumbnailPixelSize`) using the `framePixelSize` and `preserveAspectRatio = YES`
+ 4. Else, use full-pixel decoding (already smaller than the limit bytes)
+ 5. Whatever the result, this does not affect the animated/static behavior of the image. So even if you set `limitBytes = 1 && frameCount = 100`, we will still create an animated image with each frame at `1x1` pixel size.
+ @note This option has higher priority than `.imageThumbnailPixelSize`
+ @warning This does not affect the cache key, which means it will affect the global cache even the next time you query without this option. Pay attention when you use this in global options (it's always recommended to use request-level options for different pipelines)
+ */
+FOUNDATION_EXPORT SDWebImageContextOption _Nonnull const SDWebImageContextImageScaleDownLimitBytes;
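A request-level sketch; the 20 MB cap is an arbitrary illustration (`imageView` and `url` are placeholders):

    // Keep the decoded bitmap(s) for this request under roughly 20 MB, animated frames included.
    [imageView sd_setImageWithURL:url
                 placeholderImage:nil
                          options:0
                          context:@{SDWebImageContextImageScaleDownLimitBytes : @(20 * 1024 * 1024)}];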
+
 #pragma mark - Cache Context Options
 
 /**

+ 22 - 14
Pods/SDWebImage/SDWebImage/Core/SDWebImageDefine.m

@@ -9,6 +9,7 @@
 #import "SDWebImageDefine.h"
 #import "UIImage+Metadata.h"
 #import "NSImage+Compatibility.h"
+#import "SDAnimatedImage.h"
 #import "SDAssociatedObject.h"
 
 #pragma mark - Image scale
@@ -22,20 +23,7 @@ inline CGFloat SDImageScaleFactorForKey(NSString * _Nullable key) {
     if (!key) {
         return scale;
     }
-    // Check if target OS support scale
-#if SD_WATCH
-    if ([[WKInterfaceDevice currentDevice] respondsToSelector:@selector(screenScale)])
-#elif SD_UIKIT
-    if ([[UIScreen mainScreen] respondsToSelector:@selector(scale)])
-#elif SD_MAC
-    NSScreen *mainScreen = nil;
-    if (@available(macOS 10.12, *)) {
-        mainScreen = [NSScreen mainScreen];
-    } else {
-        mainScreen = [NSScreen screens].firstObject;
-    }
-    if ([mainScreen respondsToSelector:@selector(backingScaleFactor)])
-#endif
+    // Now all OSes support the Retina display scale system
     {
         // a@2x.png -> 8
         if (key.length >= 8) {
@@ -81,6 +69,24 @@ inline UIImage * _Nullable SDScaledImageForScaleFactor(CGFloat scale, UIImage *
         return image;
     }
     UIImage *scaledImage;
+    // Check SDAnimatedImage support for shortcut
+    if ([image.class conformsToProtocol:@protocol(SDAnimatedImage)]) {
+        if ([image respondsToSelector:@selector(animatedCoder)]) {
+            id<SDAnimatedImageCoder> coder = [(id<SDAnimatedImage>)image animatedCoder];
+            if (coder) {
+                scaledImage = [[image.class alloc] initWithAnimatedCoder:coder scale:scale];
+            }
+        } else {
+            // Some class impl does not support `animatedCoder`, keep for compatibility
+            NSData *data = [(id<SDAnimatedImage>)image animatedImageData];
+            if (data) {
+                scaledImage = [[image.class alloc] initWithData:data scale:scale];
+            }
+        }
+        if (scaledImage) {
+            return scaledImage;
+        }
+    }
     if (image.sd_isAnimated) {
         UIImage *animatedImage;
 #if SD_UIKIT || SD_WATCH
@@ -132,11 +138,13 @@ SDWebImageContextOption const SDWebImageContextImageCache = @"imageCache";
 SDWebImageContextOption const SDWebImageContextImageLoader = @"imageLoader";
 SDWebImageContextOption const SDWebImageContextImageCoder = @"imageCoder";
 SDWebImageContextOption const SDWebImageContextImageTransformer = @"imageTransformer";
+SDWebImageContextOption const SDWebImageContextImageForceDecodePolicy = @"imageForceDecodePolicy";
 SDWebImageContextOption const SDWebImageContextImageDecodeOptions = @"imageDecodeOptions";
 SDWebImageContextOption const SDWebImageContextImageScaleFactor = @"imageScaleFactor";
 SDWebImageContextOption const SDWebImageContextImagePreserveAspectRatio = @"imagePreserveAspectRatio";
 SDWebImageContextOption const SDWebImageContextImageThumbnailPixelSize = @"imageThumbnailPixelSize";
 SDWebImageContextOption const SDWebImageContextImageTypeIdentifierHint = @"imageTypeIdentifierHint";
+SDWebImageContextOption const SDWebImageContextImageScaleDownLimitBytes = @"imageScaleDownLimitBytes";
 SDWebImageContextOption const SDWebImageContextImageEncodeOptions = @"imageEncodeOptions";
 SDWebImageContextOption const SDWebImageContextQueryCacheType = @"queryCacheType";
 SDWebImageContextOption const SDWebImageContextStoreCacheType = @"storeCacheType";

+ 7 - 1
Pods/SDWebImage/SDWebImage/Core/SDWebImageDownloader.h

@@ -74,8 +74,10 @@ typedef NS_OPTIONS(NSUInteger, SDWebImageDownloaderOptions) {
     /**
      * By default, we will decode the image in the background during cache query and download from the network. This can help to improve performance because when rendering image on the screen, it need to be firstly decoded. But this happen on the main queue by Core Animation.
      * However, this process may increase the memory usage as well. If you are experiencing a issue due to excessive memory consumption, This flag can prevent decode the image.
+     * @note 5.14.0 introduced `SDImageCoderDecodeUseLazyDecoding`; use that for better control from the codec, instead of post-processing. It acts similarly to this option but works for SDAnimatedImage as well (this one does not)
+     * @deprecated Deprecated in v5.17.0. If you don't want force-decode, pass [.imageForceDecodePolicy] = [SDImageForceDecodePolicy.never] in the context options
      */
-    SDWebImageDownloaderAvoidDecodeImage = 1 << 9,
+    SDWebImageDownloaderAvoidDecodeImage API_DEPRECATED("Use SDWebImageContextImageForceDecodePolicy instead", macos(10.10, 10.10), ios(8.0, 8.0), tvos(9.0, 9.0), watchos(2.0, 2.0)) = 1 << 9,
     
     /**
      * By default, we decode the animated image. This flag can force decode the first frame only and produce the static image.
@@ -95,9 +97,13 @@ typedef NS_OPTIONS(NSUInteger, SDWebImageDownloaderOptions) {
     SDWebImageDownloaderMatchAnimatedImageClass = 1 << 12,
 };
 
+/// Posted when the URLSessionTask is started (`resume` called)
 FOUNDATION_EXPORT NSNotificationName _Nonnull const SDWebImageDownloadStartNotification;
+/// Posted when the URLSessionTask receives the HTTP response (`didReceiveResponse:completionHandler:` called)
 FOUNDATION_EXPORT NSNotificationName _Nonnull const SDWebImageDownloadReceiveResponseNotification;
+/// Posted when the URLSessionTask is stopped (`didCompleteWithError:` with an error, or `cancel` called)
 FOUNDATION_EXPORT NSNotificationName _Nonnull const SDWebImageDownloadStopNotification;
+/// Posted when the URLSessionTask finishes with success (`didCompleteWithError:` without an error)
 FOUNDATION_EXPORT NSNotificationName _Nonnull const SDWebImageDownloadFinishNotification;
 
 typedef SDImageLoaderProgressBlock SDWebImageDownloaderProgressBlock;

+ 10 - 25
Pods/SDWebImage/SDWebImage/Core/SDWebImageDownloader.m

@@ -21,22 +21,6 @@ NSNotificationName const SDWebImageDownloadStopNotification = @"SDWebImageDownlo
 NSNotificationName const SDWebImageDownloadFinishNotification = @"SDWebImageDownloadFinishNotification";
 
 static void * SDWebImageDownloaderContext = &SDWebImageDownloaderContext;
-static void * SDWebImageDownloaderOperationKey = &SDWebImageDownloaderOperationKey;
-
-BOOL SDWebImageDownloaderOperationGetCompleted(id<SDWebImageDownloaderOperation> operation) {
-    NSCParameterAssert(operation);
-    NSNumber *value = objc_getAssociatedObject(operation, SDWebImageDownloaderOperationKey);
-    if (value != nil) {
-        return value.boolValue;
-    } else {
-        return NO;
-    }
-}
-
-void SDWebImageDownloaderOperationSetCompleted(id<SDWebImageDownloaderOperation> operation, BOOL isCompleted) {
-    NSCParameterAssert(operation);
-    objc_setAssociatedObject(operation, SDWebImageDownloaderOperationKey, @(isCompleted), OBJC_ASSOCIATION_RETAIN);
-}
 
 @interface SDWebImageDownloadToken ()
 
@@ -120,11 +104,12 @@ void SDWebImageDownloaderOperationSetCompleted(id<SDWebImageDownloaderOperation>
         _URLOperations = [NSMutableDictionary new];
         NSMutableDictionary<NSString *, NSString *> *headerDictionary = [NSMutableDictionary dictionary];
         NSString *userAgent = nil;
-#if SD_UIKIT
         // User-Agent Header; see http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.43
+#if SD_VISION
+        userAgent = [NSString stringWithFormat:@"%@/%@ (%@; visionOS %@; Scale/%0.2f)", [[NSBundle mainBundle] infoDictionary][(__bridge NSString *)kCFBundleExecutableKey] ?: [[NSBundle mainBundle] infoDictionary][(__bridge NSString *)kCFBundleIdentifierKey], [[NSBundle mainBundle] infoDictionary][@"CFBundleShortVersionString"] ?: [[NSBundle mainBundle] infoDictionary][(__bridge NSString *)kCFBundleVersionKey], [[UIDevice currentDevice] model], [[UIDevice currentDevice] systemVersion], UITraitCollection.currentTraitCollection.displayScale];
+#elif SD_UIKIT
         userAgent = [NSString stringWithFormat:@"%@/%@ (%@; iOS %@; Scale/%0.2f)", [[NSBundle mainBundle] infoDictionary][(__bridge NSString *)kCFBundleExecutableKey] ?: [[NSBundle mainBundle] infoDictionary][(__bridge NSString *)kCFBundleIdentifierKey], [[NSBundle mainBundle] infoDictionary][@"CFBundleShortVersionString"] ?: [[NSBundle mainBundle] infoDictionary][(__bridge NSString *)kCFBundleVersionKey], [[UIDevice currentDevice] model], [[UIDevice currentDevice] systemVersion], [[UIScreen mainScreen] scale]];
 #elif SD_WATCH
-        // User-Agent Header; see http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.43
         userAgent = [NSString stringWithFormat:@"%@/%@ (%@; watchOS %@; Scale/%0.2f)", [[NSBundle mainBundle] infoDictionary][(__bridge NSString *)kCFBundleExecutableKey] ?: [[NSBundle mainBundle] infoDictionary][(__bridge NSString *)kCFBundleIdentifierKey], [[NSBundle mainBundle] infoDictionary][@"CFBundleShortVersionString"] ?: [[NSBundle mainBundle] infoDictionary][(__bridge NSString *)kCFBundleVersionKey], [[WKInterfaceDevice currentDevice] model], [[WKInterfaceDevice currentDevice] systemVersion], [[WKInterfaceDevice currentDevice] screenScale]];
 #elif SD_MAC
         userAgent = [NSString stringWithFormat:@"%@/%@ (Mac OS X %@)", [[NSBundle mainBundle] infoDictionary][(__bridge NSString *)kCFBundleExecutableKey] ?: [[NSBundle mainBundle] infoDictionary][(__bridge NSString *)kCFBundleIdentifierKey], [[NSBundle mainBundle] infoDictionary][@"CFBundleShortVersionString"] ?: [[NSBundle mainBundle] infoDictionary][(__bridge NSString *)kCFBundleVersionKey], [[NSProcessInfo processInfo] operatingSystemVersionString]];
@@ -239,7 +224,7 @@ void SDWebImageDownloaderOperationSetCompleted(id<SDWebImageDownloaderOperation>
     BOOL shouldNotReuseOperation;
     if (operation) {
         @synchronized (operation) {
-            shouldNotReuseOperation = operation.isFinished || operation.isCancelled || SDWebImageDownloaderOperationGetCompleted(operation);
+            shouldNotReuseOperation = operation.isFinished || operation.isCancelled;
         }
     } else {
         shouldNotReuseOperation = YES;
@@ -288,6 +273,8 @@ void SDWebImageDownloaderOperationSetCompleted(id<SDWebImageDownloaderOperation>
 }
 
 #pragma mark Helper methods
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wdeprecated-declarations"
 + (SDWebImageOptions)imageOptionsFromDownloaderOptions:(SDWebImageDownloaderOptions)downloadOptions {
     SDWebImageOptions options = 0;
     if (downloadOptions & SDWebImageDownloaderScaleDownLargeImages) options |= SDWebImageScaleDownLargeImages;
@@ -298,6 +285,7 @@ void SDWebImageDownloaderOperationSetCompleted(id<SDWebImageDownloaderOperation>
     
     return options;
 }
+#pragma clang diagnostic pop
 
 - (nullable NSOperation<SDWebImageDownloaderOperation> *)createDownloaderOperationWithUrl:(nonnull NSURL *)url
                                                                                   options:(SDWebImageDownloaderOptions)options
@@ -515,12 +503,6 @@ didReceiveResponse:(NSURLResponse *)response
     
     // Identify the operation that runs this task and pass it the delegate method
     NSOperation<SDWebImageDownloaderOperation> *dataOperation = [self operationWithTask:task];
-    if (dataOperation) {
-        @synchronized (dataOperation) {
-            // Mark the downloader operation `isCompleted = YES`, no longer re-use this operation when new request comes in
-            SDWebImageDownloaderOperationSetCompleted(dataOperation, YES);
-        }
-    }
     if ([dataOperation respondsToSelector:@selector(URLSession:task:didCompleteWithError:)]) {
         [dataOperation URLSession:session task:task didCompleteWithError:error];
     }
@@ -625,6 +607,8 @@ didReceiveResponse:(NSURLResponse *)response
     return YES;
 }
 
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wdeprecated-declarations"
 - (id<SDWebImageOperation>)requestImageWithURL:(NSURL *)url options:(SDWebImageOptions)options context:(SDWebImageContext *)context progress:(SDImageLoaderProgressBlock)progressBlock completed:(SDImageLoaderCompletedBlock)completedBlock {
     UIImage *cachedImage = context[SDWebImageContextLoaderCachedImage];
     
@@ -651,6 +635,7 @@ didReceiveResponse:(NSURLResponse *)response
     
     return [self downloadImageWithURL:url options:downloaderOptions context:context progress:progressBlock completed:completedBlock];
 }
+#pragma clang diagnostic pop
 
 - (BOOL)shouldBlockFailedURLWithURL:(NSURL *)url error:(NSError *)error {
     return [self shouldBlockFailedURLWithURL:url error:error options:0 context:nil];
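
The push/ignored/pop pragma pair added around `imageOptionsFromDownloaderOptions:` suppresses `-Wdeprecated-declarations` only for that one helper, leaving diagnostics on for the rest of the file; the mapped legacy flags appear to be the deprecated symbols being referenced. A minimal sketch of the same pattern (the function name is illustrative, and which flags are actually deprecated in this release is an assumption):

    #import <SDWebImage/SDWebImage.h>

    // Sketch: suppress the deprecation warning only around the legacy-flag mapping.
    static SDWebImageOptions MapLegacyScaleDownFlag(SDWebImageDownloaderOptions downloadOptions) {
        SDWebImageOptions options = 0;
    #pragma clang diagnostic push
    #pragma clang diagnostic ignored "-Wdeprecated-declarations"
        if (downloadOptions & SDWebImageDownloaderScaleDownLargeImages) options |= SDWebImageScaleDownLargeImages;
    #pragma clang diagnostic pop
        return options;
    }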

+ 97 - 63
Pods/SDWebImage/SDWebImage/Core/SDWebImageDownloaderOperation.m

@@ -14,8 +14,6 @@
 #import "SDImageCacheDefine.h"
 #import "SDCallbackQueue.h"
 
-BOOL SDWebImageDownloaderOperationGetCompleted(id<SDWebImageDownloaderOperation> operation); // Private currently, mark open if needed
-
 // A handler to represent individual request
 @interface SDWebImageDownloaderOperationToken : NSObject
 
@@ -62,6 +60,8 @@ BOOL SDWebImageDownloaderOperationGetCompleted(id<SDWebImageDownloaderOperation>
 @property (strong, nonatomic, nullable) NSError *responseError;
 @property (assign, nonatomic) double previousProgress; // previous progress percent
 
+@property (assign, nonatomic, getter = isDownloadCompleted) BOOL downloadCompleted;
+
 @property (strong, nonatomic, nullable) id<SDWebImageDownloaderResponseModifier> responseModifier; // modify original URLResponse
 @property (strong, nonatomic, nullable) id<SDWebImageDownloaderDecryptor> decryptor; // decrypt image data
 
@@ -112,6 +112,7 @@ BOOL SDWebImageDownloaderOperationGetCompleted(id<SDWebImageDownloaderOperation>
         _finished = NO;
         _expectedSize = 0;
         _unownedSession = session;
+        _downloadCompleted = NO;
         _coderQueue = [[NSOperationQueue alloc] init];
         _coderQueue.maxConcurrentOperationCount = 1;
         _coderQueue.name = @"com.hackemist.SDWebImageDownloaderOperation.coderQueue";
@@ -338,6 +339,90 @@ BOOL SDWebImageDownloaderOperationGetCompleted(id<SDWebImageDownloaderOperation>
     return YES;
 }
 
+// Check for unprocessed tokens.
+// If all tokens have been processed, call [self done].
+- (void)checkDoneWithImageData:(NSData *)imageData
+                finishedTokens:(NSArray<SDWebImageDownloaderOperationToken *> *)finishedTokens {
+    @synchronized (self) {
+        NSMutableArray<SDWebImageDownloaderOperationToken *> *tokens = [self.callbackTokens mutableCopy];
+        [finishedTokens enumerateObjectsUsingBlock:^(SDWebImageDownloaderOperationToken * _Nonnull obj, NSUInteger idx, BOOL * _Nonnull stop) {
+            [tokens removeObjectIdenticalTo:obj];
+        }];
+        if (tokens.count == 0) {
+            [self done];
+        } else {
+            // If there are new tokens added during the decoding operation, the decoding operation is supplemented with these new tokens.
+            [self startCoderOperationWithImageData:imageData pendingTokens:tokens finishedTokens:finishedTokens];
+        }
+    }
+}
+
+- (void)startCoderOperationWithImageData:(NSData *)imageData
+                           pendingTokens:(NSArray<SDWebImageDownloaderOperationToken *> *)pendingTokens
+                          finishedTokens:(NSArray<SDWebImageDownloaderOperationToken *> *)finishedTokens {
+    @weakify(self);
+    for (SDWebImageDownloaderOperationToken *token in pendingTokens) {
+        [self.coderQueue addOperationWithBlock:^{
+            @strongify(self);
+            if (!self) {
+                return;
+            }
+            UIImage *image;
+            // check if we already decode this variant of image for current callback
+            if (token.decodeOptions) {
+                image = [self.imageMap objectForKey:token.decodeOptions];
+            }
+            if (!image) {
+                // check if we already use progressive decoding, use that to produce faster decoding
+                id<SDProgressiveImageCoder> progressiveCoder = SDImageLoaderGetProgressiveCoder(self);
+                SDWebImageOptions options = [[self class] imageOptionsFromDownloaderOptions:self.options];
+                SDWebImageContext *context;
+                if (token.decodeOptions) {
+                    SDWebImageMutableContext *mutableContext = [NSMutableDictionary dictionaryWithDictionary:self.context];
+                    SDSetDecodeOptionsToContext(mutableContext, &options, token.decodeOptions);
+                    context = [mutableContext copy];
+                } else {
+                    context = self.context;
+                }
+                if (progressiveCoder) {
+                    image = SDImageLoaderDecodeProgressiveImageData(imageData, self.request.URL, YES, self, options, context);
+                } else {
+                    image = SDImageLoaderDecodeImageData(imageData, self.request.URL, options, context);
+                }
+                if (image && token.decodeOptions) {
+                    [self.imageMap setObject:image forKey:token.decodeOptions];
+                }
+            }
+            CGSize imageSize = image.size;
+            if (imageSize.width == 0 || imageSize.height == 0) {
+                NSString *description = image == nil ? @"Downloaded image decode failed" : @"Downloaded image has 0 pixels";
+                NSError *error = [NSError errorWithDomain:SDWebImageErrorDomain code:SDWebImageErrorBadImageData userInfo:@{NSLocalizedDescriptionKey : description}];
+                [self callCompletionBlockWithToken:token image:nil imageData:nil error:error finished:YES];
+            } else {
+                [self callCompletionBlockWithToken:token image:image imageData:imageData error:nil finished:YES];
+            }
+        }];
+    }
+    // call [self done] after all completed block was dispatched
+    dispatch_block_t doneBlock = ^{
+        @strongify(self);
+        if (!self) {
+            return;
+        }
+        // Check for new tokens added during the decode operation.
+        [self checkDoneWithImageData:imageData
+                      finishedTokens:[finishedTokens arrayByAddingObjectsFromArray:pendingTokens]];
+    };
+    if (@available(iOS 13, tvOS 13, macOS 10.15, watchOS 6, *)) {
+        // seems faster than `addOperationWithBlock`
+        [self.coderQueue addBarrierBlock:doneBlock];
+    } else {
+        // serial queue, so this has the same effect semantically
+        [self.coderQueue addOperationWithBlock:doneBlock];
+    }
+}
+
 #pragma mark NSURLSessionDataDelegate
 
 - (void)URLSession:(NSURLSession *)session
@@ -468,7 +553,7 @@ didReceiveResponse:(NSURLResponse *)response
         NSData *imageData = self.imageData;
         
         // keep maximum one progressive decode process during download
-        if (self.coderQueue.operationCount == 0) {
+        if (imageData && self.coderQueue.operationCount == 0) {
             // NSOperation have autoreleasepool, don't need to create extra one
             @weakify(self);
             [self.coderQueue addOperationWithBlock:^{
@@ -478,7 +563,7 @@ didReceiveResponse:(NSURLResponse *)response
                 }
                 // When cancelled or transfer finished (`didCompleteWithError`), cancel the progress callback, only completed block is called and enough
                 @synchronized (self) {
-                    if (self.isCancelled || SDWebImageDownloaderOperationGetCompleted(self)) {
+                    if (self.isCancelled || self.isDownloadCompleted) {
                         return;
                     }
                 }
@@ -521,6 +606,8 @@ didReceiveResponse:(NSURLResponse *)response
     // If we already cancel the operation or anything mark the operation finished, don't callback twice
     if (self.isFinished) return;
     
+    self.downloadCompleted = YES;
+    
     NSArray<SDWebImageDownloaderOperationToken *> *tokens;
     @synchronized (self) {
         tokens = [self.callbackTokens copy];
@@ -545,7 +632,6 @@ didReceiveResponse:(NSURLResponse *)response
     } else {
         if (tokens.count > 0) {
             NSData *imageData = self.imageData;
-            self.imageData = nil;
             // data decryptor
             if (imageData && self.decryptor) {
                 imageData = [self.decryptor decryptedDataWithData:imageData response:self.response];
@@ -565,64 +651,9 @@ didReceiveResponse:(NSURLResponse *)response
                 } else {
                     // decode the image in coder queue, cancel all previous decoding process
                     [self.coderQueue cancelAllOperations];
-                    @weakify(self);
-                    for (SDWebImageDownloaderOperationToken *token in tokens) {
-                        [self.coderQueue addOperationWithBlock:^{
-                            @strongify(self);
-                            if (!self) {
-                                return;
-                            }
-                            UIImage *image;
-                            // check if we already decode this variant of image for current callback
-                            if (token.decodeOptions) {
-                                image = [self.imageMap objectForKey:token.decodeOptions];
-                            }
-                            if (!image) {
-                                // check if we already use progressive decoding, use that to produce faster decoding
-                                id<SDProgressiveImageCoder> progressiveCoder = SDImageLoaderGetProgressiveCoder(self);
-                                SDWebImageOptions options = [[self class] imageOptionsFromDownloaderOptions:self.options];
-                                SDWebImageContext *context;
-                                if (token.decodeOptions) {
-                                    SDWebImageMutableContext *mutableContext = [NSMutableDictionary dictionaryWithDictionary:self.context];
-                                    SDSetDecodeOptionsToContext(mutableContext, &options, token.decodeOptions);
-                                    context = [mutableContext copy];
-                                } else {
-                                    context = self.context;
-                                }
-                                if (progressiveCoder) {
-                                    image = SDImageLoaderDecodeProgressiveImageData(imageData, self.request.URL, YES, self, options, context);
-                                } else {
-                                    image = SDImageLoaderDecodeImageData(imageData, self.request.URL, options, context);
-                                }
-                                if (image && token.decodeOptions) {
-                                    [self.imageMap setObject:image forKey:token.decodeOptions];
-                                }
-                            }
-                            CGSize imageSize = image.size;
-                            if (imageSize.width == 0 || imageSize.height == 0) {
-                                NSString *description = image == nil ? @"Downloaded image decode failed" : @"Downloaded image has 0 pixels";
-                                NSError *error = [NSError errorWithDomain:SDWebImageErrorDomain code:SDWebImageErrorBadImageData userInfo:@{NSLocalizedDescriptionKey : description}];
-                                [self callCompletionBlockWithToken:token image:nil imageData:nil error:error finished:YES];
-                            } else {
-                                [self callCompletionBlockWithToken:token image:image imageData:imageData error:nil finished:YES];
-                            }
-                        }];
-                    }
-                    // call [self done] after all completed block was dispatched
-                    dispatch_block_t doneBlock = ^{
-                        @strongify(self);
-                        if (!self) {
-                            return;
-                        }
-                        [self done];
-                    };
-                    if (@available(iOS 13, tvOS 13, macOS 10.15, watchOS 6, *)) {
-                        // seems faster than `addOperationWithBlock`
-                        [self.coderQueue addBarrierBlock:doneBlock];
-                    } else {
-                        // serial queue, this does the same effect in semantics
-                        [self.coderQueue addOperationWithBlock:doneBlock];
-                    }
+                    [self startCoderOperationWithImageData:imageData
+                                             pendingTokens:tokens
+                                            finishedTokens:@[]];
                 }
             } else {
                 [self callCompletionBlocksWithError:[NSError errorWithDomain:SDWebImageErrorDomain code:SDWebImageErrorBadImageData userInfo:@{NSLocalizedDescriptionKey : @"Image data is nil"}]];
@@ -671,6 +702,8 @@ didReceiveResponse:(NSURLResponse *)response
 }
 
 #pragma mark Helper methods
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wdeprecated-declarations"
 + (SDWebImageOptions)imageOptionsFromDownloaderOptions:(SDWebImageDownloaderOptions)downloadOptions {
     SDWebImageOptions options = 0;
     if (downloadOptions & SDWebImageDownloaderScaleDownLargeImages) options |= SDWebImageScaleDownLargeImages;
@@ -681,6 +714,7 @@ didReceiveResponse:(NSURLResponse *)response
     
     return options;
 }
+#pragma clang diagnostic pop
 
 - (BOOL)shouldContinueWhenAppEntersBackground {
     return SD_OPTIONS_CONTAINS(self.options, SDWebImageDownloaderContinueInBackground);
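
The refactor above moves the inline per-token decode loop into `startCoderOperationWithImageData:pendingTokens:finishedTokens:`, and `checkDoneWithImageData:finishedTokens:` re-runs the decode for any tokens added while decoding, calling `done` only when nothing is left. A stripped-down sketch of the underlying queue pattern, assuming a serial `NSOperationQueue` like `coderQueue` (the helper name and `finish` handler are assumptions):

    #import <Foundation/Foundation.h>

    // Enqueue one block per token, then run `finish` only after all of them have completed.
    static void SDEnqueueDecodesThenFinish(NSOperationQueue *coderQueue,
                                           NSArray *tokens,
                                           void (^decodeToken)(id token),
                                           dispatch_block_t finish) {
        for (id token in tokens) {
            [coderQueue addOperationWithBlock:^{
                decodeToken(token); // one decode/callback per pending token
            }];
        }
        if (@available(iOS 13, tvOS 13, macOS 10.15, watchOS 6, *)) {
            // The barrier runs after everything queued before it has finished.
            [coderQueue addBarrierBlock:finish];
        } else {
            // With maxConcurrentOperationCount == 1, a plain block behaves the same.
            [coderQueue addOperationWithBlock:finish];
        }
    }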

+ 1 - 0
Pods/SDWebImage/SDWebImage/Core/SDWebImageError.h

@@ -9,6 +9,7 @@
 
 #import "SDWebImageCompat.h"
 
+/// The error domain representing the SDWebImage loading system, with custom error codes
 FOUNDATION_EXPORT NSErrorDomain const _Nonnull SDWebImageErrorDomain;
 
 /// The response instance for invalid download response (NSURLResponse *)

+ 2 - 0
Pods/SDWebImage/SDWebImage/Core/SDWebImageIndicator.h

@@ -66,6 +66,7 @@
  */
 @interface SDWebImageActivityIndicator (Conveniences)
 
+#if !SD_VISION
 /// These indicators use a fixed color without dark mode support
 /// gray-style activity indicator
 @property (nonatomic, class, nonnull, readonly) SDWebImageActivityIndicator *grayIndicator;
@@ -75,6 +76,7 @@
 @property (nonatomic, class, nonnull, readonly) SDWebImageActivityIndicator *whiteIndicator;
 /// large white-style activity indicator
 @property (nonatomic, class, nonnull, readonly) SDWebImageActivityIndicator *whiteLargeIndicator;
+#endif
 /// These indicators use the system style and support dark mode if available (iOS 13+/macOS 10.14+)
 /// large activity indicator
 @property (nonatomic, class, nonnull, readonly) SDWebImageActivityIndicator *largeIndicator;
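
The `#if !SD_VISION` guard above removes the fixed-color convenience indicators on visionOS, while the system-style `largeIndicator`/`mediumIndicator` stay available everywhere. Typical usage is unchanged; an illustrative snippet (the view and URL are assumptions):

    #import <SDWebImage/SDWebImage.h>

    // e.g. inside -viewDidLoad
    UIImageView *imageView = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, 200, 200)];
    // System-style indicator; resolves to the medium/large styles on newer OSes and visionOS.
    imageView.sd_imageIndicator = SDWebImageActivityIndicator.largeIndicator;
    [imageView sd_setImageWithURL:[NSURL URLWithString:@"https://example.com/photo.jpg"]];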

+ 19 - 3
Pods/SDWebImage/SDWebImage/Core/SDWebImageIndicator.m

@@ -41,7 +41,17 @@
 #pragma clang diagnostic push
 #pragma clang diagnostic ignored "-Wdeprecated-declarations"
 - (void)commonInit {
-    self.indicatorView = [[UIActivityIndicatorView alloc] initWithActivityIndicatorStyle:UIActivityIndicatorViewStyleWhite];
+#if SD_VISION
+    UIActivityIndicatorViewStyle style = UIActivityIndicatorViewStyleMedium;
+#else
+    UIActivityIndicatorViewStyle style;
+    if (@available(iOS 13.0, tvOS 13.0, *)) {
+        style = UIActivityIndicatorViewStyleMedium;
+    } else {
+        style = UIActivityIndicatorViewStyleWhite;
+    }
+#endif
+    self.indicatorView = [[UIActivityIndicatorView alloc] initWithActivityIndicatorStyle:style];
     self.indicatorView.autoresizingMask = UIViewAutoresizingFlexibleTopMargin | UIViewAutoresizingFlexibleLeftMargin | UIViewAutoresizingFlexibleRightMargin | UIViewAutoresizingFlexibleBottomMargin;
 }
 #pragma clang diagnostic pop
@@ -79,6 +89,7 @@
 
 @implementation SDWebImageActivityIndicator (Conveniences)
 
+#if !SD_VISION
 #pragma clang diagnostic push
 #pragma clang diagnostic ignored "-Wdeprecated-declarations"
 + (SDWebImageActivityIndicator *)grayIndicator {
@@ -134,10 +145,13 @@
 #endif
     return indicator;
 }
+#endif
 
 + (SDWebImageActivityIndicator *)largeIndicator {
     SDWebImageActivityIndicator *indicator = [SDWebImageActivityIndicator new];
-#if SD_UIKIT
+#if SD_VISION
+    indicator.indicatorView.activityIndicatorViewStyle = UIActivityIndicatorViewStyleLarge;
+#elif SD_UIKIT
     if (@available(iOS 13.0, tvOS 13.0, *)) {
         indicator.indicatorView.activityIndicatorViewStyle = UIActivityIndicatorViewStyleLarge;
     } else {
@@ -152,7 +166,9 @@
 
 + (SDWebImageActivityIndicator *)mediumIndicator {
     SDWebImageActivityIndicator *indicator = [SDWebImageActivityIndicator new];
-#if SD_UIKIT
+#if SD_VISION
+    indicator.indicatorView.activityIndicatorViewStyle = UIActivityIndicatorViewStyleMedium;
+#elif SD_UIKIT
     if (@available(iOS 13.0, tvOS 13.0, *)) {
         indicator.indicatorView.activityIndicatorViewStyle = UIActivityIndicatorViewStyleMedium;
     } else {

+ 15 - 0
Pods/SDWebImage/SDWebImage/Core/UIButton+WebCache.h

@@ -21,6 +21,7 @@
 
 /**
  * Get the current image URL.
+ * This simply translates to `[self sd_imageURLForState:self.state]` from v5.18.0
  */
 @property (nonatomic, strong, readonly, nullable) NSURL *sd_currentImageURL;
 
@@ -31,6 +32,13 @@
  */
 - (nullable NSURL *)sd_imageURLForState:(UIControlState)state;
 
+/**
+ * Get the image operation key for a control state.
+ *
+ * @param state Which state you want to know the operation key for. The values are described in UIControlState.
+ */
+- (nonnull NSString *)sd_imageOperationKeyForState:(UIControlState)state;
+
 /**
  * Set the button `image` with an `url`.
  *
@@ -202,6 +210,13 @@
  */
 @property (nonatomic, strong, readonly, nullable) NSURL *sd_currentBackgroundImageURL;
 
+/**
+ * Get the background image operation key for a control state.
+ *
+ * @param state Which state you want to know the background image operation key for. The values are described in UIControlState.
+ */
+- (nonnull NSString *)sd_backgroundImageOperationKeyForState:(UIControlState)state;
+
 /**
  * Get the background image URL for a control state.
  * 

+ 35 - 71
Pods/SDWebImage/SDWebImage/Core/UIButton+WebCache.m

@@ -12,47 +12,14 @@
 
 #import "objc/runtime.h"
 #import "UIView+WebCacheOperation.h"
+#import "UIView+WebCacheState.h"
 #import "UIView+WebCache.h"
 #import "SDInternalMacros.h"
 
-static char imageURLStorageKey;
-
-typedef NSMutableDictionary<NSString *, NSURL *> SDStateImageURLDictionary;
-
-static inline NSString * imageURLKeyForState(UIControlState state) {
-    return [NSString stringWithFormat:@"image_%lu", (unsigned long)state];
-}
-
-static inline NSString * backgroundImageURLKeyForState(UIControlState state) {
-    return [NSString stringWithFormat:@"backgroundImage_%lu", (unsigned long)state];
-}
-
-static inline NSString * imageOperationKeyForState(UIControlState state) {
-    return [NSString stringWithFormat:@"UIButtonImageOperation%lu", (unsigned long)state];
-}
-
-static inline NSString * backgroundImageOperationKeyForState(UIControlState state) {
-    return [NSString stringWithFormat:@"UIButtonBackgroundImageOperation%lu", (unsigned long)state];
-}
-
 @implementation UIButton (WebCache)
 
 #pragma mark - Image
 
-- (nullable NSURL *)sd_currentImageURL {
-    NSURL *url = self.sd_imageURLStorage[imageURLKeyForState(self.state)];
-
-    if (!url) {
-        url = self.sd_imageURLStorage[imageURLKeyForState(UIControlStateNormal)];
-    }
-
-    return url;
-}
-
-- (nullable NSURL *)sd_imageURLForState:(UIControlState)state {
-    return self.sd_imageURLStorage[imageURLKeyForState(state)];
-}
-
 - (void)sd_setImageWithURL:(nullable NSURL *)url forState:(UIControlState)state {
     [self sd_setImageWithURL:url forState:state placeholderImage:nil options:0 completed:nil];
 }
@@ -92,19 +59,13 @@ static inline NSString * backgroundImageOperationKeyForState(UIControlState stat
                    context:(nullable SDWebImageContext *)context
                   progress:(nullable SDImageLoaderProgressBlock)progressBlock
                  completed:(nullable SDExternalCompletionBlock)completedBlock {
-    if (!url) {
-        [self.sd_imageURLStorage removeObjectForKey:imageURLKeyForState(state)];
-    } else {
-        self.sd_imageURLStorage[imageURLKeyForState(state)] = url;
-    }
-    
     SDWebImageMutableContext *mutableContext;
     if (context) {
         mutableContext = [context mutableCopy];
     } else {
         mutableContext = [NSMutableDictionary dictionary];
     }
-    mutableContext[SDWebImageContextSetImageOperationKey] = imageOperationKeyForState(state);
+    mutableContext[SDWebImageContextSetImageOperationKey] = [self sd_imageOperationKeyForState:state];
     @weakify(self);
     [self sd_internalSetImageWithURL:url
                     placeholderImage:placeholder
@@ -124,20 +85,6 @@ static inline NSString * backgroundImageOperationKeyForState(UIControlState stat
 
 #pragma mark - Background Image
 
-- (nullable NSURL *)sd_currentBackgroundImageURL {
-    NSURL *url = self.sd_imageURLStorage[backgroundImageURLKeyForState(self.state)];
-    
-    if (!url) {
-        url = self.sd_imageURLStorage[backgroundImageURLKeyForState(UIControlStateNormal)];
-    }
-    
-    return url;
-}
-
-- (nullable NSURL *)sd_backgroundImageURLForState:(UIControlState)state {
-    return self.sd_imageURLStorage[backgroundImageURLKeyForState(state)];
-}
-
 - (void)sd_setBackgroundImageWithURL:(nullable NSURL *)url forState:(UIControlState)state {
     [self sd_setBackgroundImageWithURL:url forState:state placeholderImage:nil options:0 completed:nil];
 }
@@ -177,19 +124,13 @@ static inline NSString * backgroundImageOperationKeyForState(UIControlState stat
                              context:(nullable SDWebImageContext *)context
                             progress:(nullable SDImageLoaderProgressBlock)progressBlock
                            completed:(nullable SDExternalCompletionBlock)completedBlock {
-    if (!url) {
-        [self.sd_imageURLStorage removeObjectForKey:backgroundImageURLKeyForState(state)];
-    } else {
-        self.sd_imageURLStorage[backgroundImageURLKeyForState(state)] = url;
-    }
-    
     SDWebImageMutableContext *mutableContext;
     if (context) {
         mutableContext = [context mutableCopy];
     } else {
         mutableContext = [NSMutableDictionary dictionary];
     }
-    mutableContext[SDWebImageContextSetImageOperationKey] = backgroundImageOperationKeyForState(state);
+    mutableContext[SDWebImageContextSetImageOperationKey] = [self sd_backgroundImageOperationKeyForState:state];
     @weakify(self);
     [self sd_internalSetImageWithURL:url
                     placeholderImage:placeholder
@@ -210,23 +151,46 @@ static inline NSString * backgroundImageOperationKeyForState(UIControlState stat
 #pragma mark - Cancel
 
 - (void)sd_cancelImageLoadForState:(UIControlState)state {
-    [self sd_cancelImageLoadOperationWithKey:imageOperationKeyForState(state)];
+    [self sd_cancelImageLoadOperationWithKey:[self sd_imageOperationKeyForState:state]];
 }
 
 - (void)sd_cancelBackgroundImageLoadForState:(UIControlState)state {
-    [self sd_cancelImageLoadOperationWithKey:backgroundImageOperationKeyForState(state)];
+    [self sd_cancelImageLoadOperationWithKey:[self sd_backgroundImageOperationKeyForState:state]];
 }
 
-#pragma mark - Private
+#pragma mark - State
+
+- (NSString *)sd_imageOperationKeyForState:(UIControlState)state {
+    return [NSString stringWithFormat:@"UIButtonImageOperation%lu", (unsigned long)state];
+}
 
-- (SDStateImageURLDictionary *)sd_imageURLStorage {
-    SDStateImageURLDictionary *storage = objc_getAssociatedObject(self, &imageURLStorageKey);
-    if (!storage) {
-        storage = [NSMutableDictionary dictionary];
-        objc_setAssociatedObject(self, &imageURLStorageKey, storage, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
+- (NSString *)sd_backgroundImageOperationKeyForState:(UIControlState)state {
+    return [NSString stringWithFormat:@"UIButtonBackgroundImageOperation%lu", (unsigned long)state];
+}
+
+- (NSURL *)sd_currentImageURL {
+    NSURL *url = [self sd_imageURLForState:self.state];
+    if (!url) {
+        url = [self sd_imageURLForState:UIControlStateNormal];
+    }
+    return url;
+}
+
+- (NSURL *)sd_imageURLForState:(UIControlState)state {
+    return [self sd_imageLoadStateForKey:[self sd_imageOperationKeyForState:state]].url;
+}
+#pragma mark - Background State
+
+- (NSURL *)sd_currentBackgroundImageURL {
+    NSURL *url = [self sd_backgroundImageURLForState:self.state];
+    if (!url) {
+        url = [self sd_backgroundImageURLForState:UIControlStateNormal];
     }
+    return url;
+}
 
-    return storage;
+- (NSURL *)sd_backgroundImageURLForState:(UIControlState)state {
+    return [self sd_imageLoadStateForKey:[self sd_backgroundImageOperationKeyForState:state]].url;
 }
 
 @end
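
With the storage now derived from `UIView+WebCacheState`, each control state keeps its own operation key, so per-state loads can be queried and cancelled independently. An illustrative usage sketch (`button` and `url` are assumptions):

    #import <SDWebImage/SDWebImage.h>

    [button sd_setImageWithURL:url forState:UIControlStateNormal];
    [button sd_setBackgroundImageWithURL:url forState:UIControlStateHighlighted];

    NSURL *normalURL = [button sd_imageURLForState:UIControlStateNormal];
    NSString *operationKey = [button sd_imageOperationKeyForState:UIControlStateNormal]; // "UIButtonImageOperation0"
    [button sd_cancelImageLoadForState:UIControlStateNormal];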

+ 6 - 0
Pods/SDWebImage/SDWebImage/Core/UIImage+ForceDecode.h

@@ -15,6 +15,12 @@
 
 /**
  A bool value indicating whether the image has already been decoded. This can help to avoid extra force decode.
+ Force decode is used for 2 cases:
+ -- 1. for an ImageIO-created image (via the `CGImageCreateWithImageSource` SPI), decoding is lazy, so we trigger the decode before rendering
+ -- 2. for a non-ImageIO-created image (via the `CGImageCreate` API), we can ensure its alignment is suitable for rendering on screen without a copy by Core Animation
+ @note For coder plugin developer, always use the SDImageCoderHelper's `colorSpaceGetDeviceRGB`/`preferredPixelFormat` to create CGImage.
+ @note For more information why force decode, see: https://github.com/path/FastImageCache#byte-alignment
+ @note From v5.17.0, the default value is always NO. Use `SDImageForceDecodePolicy` to control complicated policy.
  */
 @property (nonatomic, assign) BOOL sd_isDecoded;
 

+ 1 - 18
Pods/SDWebImage/SDWebImage/Core/UIImage+ForceDecode.m

@@ -15,24 +15,7 @@
 
 - (BOOL)sd_isDecoded {
     NSNumber *value = objc_getAssociatedObject(self, @selector(sd_isDecoded));
-    if (value != nil) {
-        return value.boolValue;
-    } else {
-        // Assume only CGImage based can use lazy decoding
-        CGImageRef cgImage = self.CGImage;
-        if (cgImage) {
-            CFStringRef uttype = CGImageGetUTType(self.CGImage);
-            if (uttype) {
-                // Only ImageIO can set `com.apple.ImageIO.imageSourceTypeIdentifier`
-                return NO;
-            } else {
-                // Thumbnail or CGBitmapContext drawn image
-                return YES;
-            }
-        }
-    }
-    // Assume others as non-decoded
-    return NO;
+    return [value boolValue];
 }
 
 - (void)setSd_isDecoded:(BOOL)sd_isDecoded {

+ 1 - 0
Pods/SDWebImage/SDWebImage/Core/UIImage+MultiFormat.h

@@ -46,6 +46,7 @@
  Encode the current image to the data, the image format is unspecified
 
  @note If the receiver is `SDAnimatedImage`, this will return the animated image data if available. No more extra encoding process.
+ @note For macOS, if the receiver contains only `SDAnimatedImageRep`, this will return the animated image data if available. No extra encoding process is performed.
  @return The encoded data. If can't encode, return nil
  */
 - (nullable NSData *)sd_imageData;

+ 13 - 0
Pods/SDWebImage/SDWebImage/Core/UIImage+MultiFormat.m

@@ -8,6 +8,7 @@
 
 #import "UIImage+MultiFormat.h"
 #import "SDImageCodersManager.h"
+#import "SDAnimatedImageRep.h"
 
 @implementation UIImage (MultiFormat)
 
@@ -28,6 +29,18 @@
 }
 
 - (nullable NSData *)sd_imageData {
+#if SD_MAC
+    NSRect imageRect = NSMakeRect(0, 0, self.size.width, self.size.height);
+    NSImageRep *imageRep = [self bestRepresentationForRect:imageRect context:nil hints:nil];
+    // Check the animated image data first
+    if ([imageRep isKindOfClass:[SDAnimatedImageRep class]]) {
+        SDAnimatedImageRep *animatedImageRep = (SDAnimatedImageRep *)imageRep;
+        NSData *imageData = [animatedImageRep animatedImageData];
+        if (imageData) {
+            return imageData;
+        }
+    }
+#endif
     return [self sd_imageDataAsFormat:SDImageFormatUndefined];
 }
 

+ 4 - 0
Pods/SDWebImage/SDWebImage/Core/UIImage+Transform.h

@@ -8,9 +8,13 @@
 
 #import "SDWebImageCompat.h"
 
+/// The scale mode to apply when image drawing on a container with different sizes.
 typedef NS_ENUM(NSUInteger, SDImageScaleMode) {
+    /// The option to scale the content to fill the target size by changing the aspect ratio of the content if necessary.
     SDImageScaleModeFill = 0,
+    /// The option to scale the content to fit the size of the view by maintaining the aspect ratio. Any remaining area of the view’s bounds is transparent.
     SDImageScaleModeAspectFit = 1,
+    /// The option to scale the content to fill the size of the view. Some portion of the content may be clipped to fill the view’s bounds.
     SDImageScaleModeAspectFill = 2
 };
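
The new doc comments mirror `UIViewContentMode` semantics. The modes feed the existing resize transform; an illustrative call (`image` is an assumption):

    #import <SDWebImage/SDWebImage.h>

    // Fit keeps the aspect ratio and may leave transparent padding; fill may crop.
    UIImage *fitted = [image sd_resizedImageWithSize:CGSizeMake(100, 100) scaleMode:SDImageScaleModeAspectFit];
    UIImage *filled = [image sd_resizedImageWithSize:CGSizeMake(100, 100) scaleMode:SDImageScaleModeAspectFill];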
 

+ 30 - 9
Pods/SDWebImage/SDWebImage/Core/UIImage+Transform.m

@@ -57,7 +57,7 @@ static inline CGRect SDCGRectFitWithScaleMode(CGRect rect, CGSize size, SDImageS
     return rect;
 }
 
-static inline UIColor * SDGetColorFromGrayscale(Pixel_88 pixel, CGBitmapInfo bitmapInfo) {
+static inline UIColor * SDGetColorFromGrayscale(Pixel_88 pixel, CGBitmapInfo bitmapInfo, CGColorSpaceRef cgColorSpace) {
     // Get alpha info, byteOrder info
     CGImageAlphaInfo alphaInfo = bitmapInfo & kCGBitmapAlphaInfoMask;
     CGBitmapInfo byteOrderInfo = bitmapInfo & kCGBitmapByteOrderMask;
@@ -135,11 +135,18 @@ static inline UIColor * SDGetColorFromGrayscale(Pixel_88 pixel, CGBitmapInfo bit
         default:
             break;
     }
-    
+#if SD_MAC
+    // Mac supports ColorSync; to ensure consistent behavior, we convert the color to the generic gamma 2.2 gray color space
+    NSColorSpace *colorSpace = [[NSColorSpace alloc] initWithCGColorSpace:cgColorSpace];
+    CGFloat components[2] = {w, a};
+    NSColor *color = [NSColor colorWithColorSpace:colorSpace components:components count:2];
+    return [color colorUsingColorSpace:NSColorSpace.genericGamma22GrayColorSpace];
+#else
     return [UIColor colorWithWhite:w alpha:a];
+#endif
 }
 
-static inline UIColor * SDGetColorFromRGBA(Pixel_8888 pixel, CGBitmapInfo bitmapInfo) {
+static inline UIColor * SDGetColorFromRGBA(Pixel_8888 pixel, CGBitmapInfo bitmapInfo, CGColorSpaceRef cgColorSpace) {
     // Get alpha info, byteOrder info
     CGImageAlphaInfo alphaInfo = bitmapInfo & kCGBitmapAlphaInfoMask;
     CGBitmapInfo byteOrderInfo = bitmapInfo & kCGBitmapByteOrderMask;
@@ -150,8 +157,10 @@ static inline UIColor * SDGetColorFromRGBA(Pixel_8888 pixel, CGBitmapInfo bitmap
         case kCGBitmapByteOrderDefault: {
             byteOrderNormal = YES;
         } break;
+        case kCGBitmapByteOrder16Little:
         case kCGBitmapByteOrder32Little: {
         } break;
+        case kCGBitmapByteOrder16Big:
         case kCGBitmapByteOrder32Big: {
             byteOrderNormal = YES;
         } break;
@@ -242,8 +251,15 @@ static inline UIColor * SDGetColorFromRGBA(Pixel_8888 pixel, CGBitmapInfo bitmap
         default:
             break;
     }
-    
+#if SD_MAC
+    // Mac supports ColorSync; to ensure consistent behavior, we convert the color to sRGB
+    NSColorSpace *colorSpace = [[NSColorSpace alloc] initWithCGColorSpace:cgColorSpace];
+    CGFloat components[4] = {r, g, b, a};
+    NSColor *color = [NSColor colorWithColorSpace:colorSpace components:components count:4];
+    return [color colorUsingColorSpace:NSColorSpace.sRGBColorSpace];
+#else
     return [UIColor colorWithRed:r green:g blue:b alpha:a];
+#endif
 }
 
 #if SD_UIKIT || SD_MAC
@@ -558,6 +574,9 @@ static inline CGImageRef _Nullable SDCreateCGImageFromCIImage(CIImage * _Nonnull
         CGImageRelease(imageRef);
         return nil;
     }
+    // Get color space for transform
+    CGColorSpaceRef colorSpace = CGImageGetColorSpace(imageRef);
+    
     // greyscale
     if (components == 2) {
         Pixel_88 pixel = {0};
@@ -565,7 +584,7 @@ static inline CGImageRef _Nullable SDCreateCGImageFromCIImage(CIImage * _Nonnull
         CFRelease(data);
         CGImageRelease(imageRef);
         // Convert to color
-        return SDGetColorFromGrayscale(pixel, bitmapInfo);
+        return SDGetColorFromGrayscale(pixel, bitmapInfo, colorSpace);
     } else if (components == 3 || components == 4) {
         // RGB/RGBA
         Pixel_8888 pixel = {0};
@@ -573,7 +592,7 @@ static inline CGImageRef _Nullable SDCreateCGImageFromCIImage(CIImage * _Nonnull
         CFRelease(data);
         CGImageRelease(imageRef);
         // Convert to color
-        return SDGetColorFromRGBA(pixel, bitmapInfo);
+        return SDGetColorFromRGBA(pixel, bitmapInfo, colorSpace);
     } else {
         NSLog(@"Unsupported components: %zu", components);
         CFRelease(data);
@@ -637,6 +656,8 @@ static inline CGImageRef _Nullable SDCreateCGImageFromCIImage(CIImage * _Nonnull
     // Convert to color
     CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(imageRef);
     NSMutableArray<UIColor *> *colors = [NSMutableArray arrayWithCapacity:CGRectGetWidth(rect) * CGRectGetHeight(rect)];
+    // ColorSpace
+    CGColorSpaceRef colorSpace = CGImageGetColorSpace(imageRef);
     for (size_t index = start; index < end; index += components) {
         if (index >= row * bytesPerRow + col * components) {
             // Index beyond the end of current row, go next row
@@ -648,14 +669,14 @@ static inline CGImageRef _Nullable SDCreateCGImageFromCIImage(CIImage * _Nonnull
         UIColor *color;
         if (components == 2) {
             Pixel_88 pixel = {pixels[index], pixels[index+1]};
-            color = SDGetColorFromGrayscale(pixel, bitmapInfo);
+            color = SDGetColorFromGrayscale(pixel, bitmapInfo, colorSpace);
         } else {
             if (components == 3) {
                 Pixel_8888 pixel = {pixels[index], pixels[index+1], pixels[index+2], 0};
-                color = SDGetColorFromRGBA(pixel, bitmapInfo);
+                color = SDGetColorFromRGBA(pixel, bitmapInfo, colorSpace);
             } else if (components == 4) {
                 Pixel_8888 pixel = {pixels[index], pixels[index+1], pixels[index+2], pixels[index+3]};
-                color = SDGetColorFromRGBA(pixel, bitmapInfo);
+                color = SDGetColorFromRGBA(pixel, bitmapInfo, colorSpace);
             } else {
                 NSLog(@"Unsupported components: %zu", components);
             }

+ 13 - 0
Pods/SDWebImage/SDWebImage/Core/UIImageView+HighlightedWebCache.h

@@ -17,6 +17,13 @@
  */
 @interface UIImageView (HighlightedWebCache)
 
+#pragma mark - Highlighted Image
+
+/**
+ * Get the current highlighted image URL.
+ */
+@property (nonatomic, strong, readonly, nullable) NSURL *sd_currentHighlightedImageURL;
+
 /**
  * Set the imageView `highlightedImage` with an `url`.
  *
@@ -124,6 +131,12 @@
                              progress:(nullable SDImageLoaderProgressBlock)progressBlock
                             completed:(nullable SDExternalCompletionBlock)completedBlock;
 
+/**
+ * Cancel the current highlighted image load (for `UIImageView.highlighted`)
+ * @note For normal image, use `sd_cancelCurrentImageLoad`
+ */
+- (void)sd_cancelCurrentHighlightedImageLoad;
+
 @end
 
 #endif

+ 12 - 3
Pods/SDWebImage/SDWebImage/Core/UIImageView+HighlightedWebCache.m

@@ -11,11 +11,10 @@
 #if SD_UIKIT
 
 #import "UIView+WebCacheOperation.h"
+#import "UIView+WebCacheState.h"
 #import "UIView+WebCache.h"
 #import "SDInternalMacros.h"
 
-static NSString * const SDHighlightedImageOperationKey = @"UIImageViewImageOperationHighlighted";
-
 @implementation UIImageView (HighlightedWebCache)
 
 - (void)sd_setHighlightedImageWithURL:(nullable NSURL *)url {
@@ -54,7 +53,7 @@ static NSString * const SDHighlightedImageOperationKey = @"UIImageViewImageOpera
     } else {
         mutableContext = [NSMutableDictionary dictionary];
     }
-    mutableContext[SDWebImageContextSetImageOperationKey] = SDHighlightedImageOperationKey;
+    mutableContext[SDWebImageContextSetImageOperationKey] = @keypath(self, highlightedImage);
     [self sd_internalSetImageWithURL:url
                     placeholderImage:nil
                              options:options
@@ -71,6 +70,16 @@ static NSString * const SDHighlightedImageOperationKey = @"UIImageViewImageOpera
                            }];
 }
 
+#pragma mark - Highlighted State
+
+- (NSURL *)sd_currentHighlightedImageURL {
+    return [self sd_imageLoadStateForKey:@keypath(self, highlightedImage)].url;
+}
+
+- (void)sd_cancelCurrentHighlightedImageLoad {
+    return [self sd_cancelImageLoadOperationWithKey:@keypath(self, highlightedImage)];
+}
+
 @end
 
 #endif
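
The highlighted-image category now tracks its load under the `highlightedImage` keypath instead of a private constant, which is what makes the new URL accessor and cancel method possible. Illustrative usage (`imageView` and `url` are assumptions):

    #import <SDWebImage/SDWebImage.h>

    [imageView sd_setHighlightedImageWithURL:url];
    NSURL *current = imageView.sd_currentHighlightedImageURL; // tracked per state from v5.18.0
    [imageView sd_cancelCurrentHighlightedImageLoad];         // leaves the normal `image` load untouched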

+ 15 - 0
Pods/SDWebImage/SDWebImage/Core/UIImageView+WebCache.h

@@ -45,6 +45,15 @@
  */
 @interface UIImageView (WebCache)
 
+#pragma mark - Image State
+
+/**
+ * Get the current image URL.
+ */
+@property (nonatomic, strong, readonly, nullable) NSURL *sd_currentImageURL;
+
+#pragma mark - Image Loading
+
 /**
  * Set the imageView `image` with an `url`.
  *
@@ -191,4 +200,10 @@
                   progress:(nullable SDImageLoaderProgressBlock)progressBlock
                  completed:(nullable SDExternalCompletionBlock)completedBlock;
 
+/**
+ * Cancel the current normal image load (for `UIImageView.image`)
+ * @note For highlighted image, use `sd_cancelCurrentHighlightedImageLoad`
+ */
+- (void)sd_cancelCurrentImageLoad;
+
 @end

+ 11 - 0
Pods/SDWebImage/SDWebImage/Core/UIImageView+WebCache.m

@@ -9,6 +9,7 @@
 #import "UIImageView+WebCache.h"
 #import "objc/runtime.h"
 #import "UIView+WebCacheOperation.h"
+#import "UIView+WebCacheState.h"
 #import "UIView+WebCache.h"
 
 @implementation UIImageView (WebCache)
@@ -64,4 +65,14 @@
                            }];
 }
 
+#pragma mark - State
+
+- (NSURL *)sd_currentImageURL {
+    return [self sd_imageLoadStateForKey:nil].url;
+}
+
+- (void)sd_cancelCurrentImageLoad {
+    return [self sd_cancelImageLoadOperationWithKey:nil];
+}
+
 @end

+ 22 - 8
Pods/SDWebImage/SDWebImage/Core/UIView+WebCache.h

@@ -11,6 +11,8 @@
 #import "SDWebImageManager.h"
 #import "SDWebImageTransition.h"
 #import "SDWebImageIndicator.h"
+#import "UIView+WebCacheOperation.h"
+#import "UIView+WebCacheState.h"
 
 /**
 The value specifies that the image progress unit count cannot be determined because the progressBlock has not been called.
@@ -24,27 +26,34 @@ typedef void(^SDSetImageBlock)(UIImage * _Nullable image, NSData * _Nullable ima
  */
 @interface UIView (WebCache)
 
-/**
- * Get the current image URL.
- *
- * @note Note that because of the limitations of categories this property can get out of sync if you use setImage: directly.
- */
-@property (nonatomic, strong, readonly, nullable) NSURL *sd_imageURL;
-
 /**
  * Get the current image operation key. Operation key is used to identify the different queries for one view instance (like UIButton).
  * See more about this in `SDWebImageContextSetImageOperationKey`.
- * If you cancel current image load, the key will be set to nil.
+ *
  * @note You can use method `UIView+WebCacheOperation` to investigate different queries' operation.
+ * @note For backward compatibility, when the current UIView has a property named exactly `image`, the operation key uses `NSStringFromClass(self.class)`. This covers `UIImageView.image/NSImageView.image/NSButton.image` (but not `UIButton`)
+ * @warning This property should only be used for a single-state view, like `UIImageView` without the highlighted state. For a stateful view like `UIButton` (one view can have multiple images loading), check its header for the correct API, like `-[UIButton sd_imageOperationKeyForState:]`
  */
 @property (nonatomic, strong, readonly, nullable) NSString *sd_latestOperationKey;
 
+#pragma mark - State
+
+/**
+ * Get the current image URL.
+ * This simply translates to `[self sd_imageLoadStateForKey:self.sd_latestOperationKey].url` from v5.18.0
+ *
+ * @note Note that because of the limitations of categories this property can get out of sync if you use setImage: directly.
+ * @warning This property should only be used for a single-state view, like `UIImageView` without the highlighted state. For a stateful view like `UIButton` (one view can have multiple images loading), use `sd_imageLoadStateForKey:` instead. See `UIView+WebCacheState.h` for more information.
+ */
+@property (nonatomic, strong, readonly, nullable) NSURL *sd_imageURL;
+
 /**
 * The current image loading progress associated with the view. The unit count is the received size and expected size of the download.
  * The `totalUnitCount` and `completedUnitCount` will be reset to 0 after a new image loading start (change from current queue). And they will be set to `SDWebImageProgressUnitCountUnknown` if the progressBlock not been called but the image loading success to mark the progress finished (change from main queue).
  * @note You can use Key-Value Observing on the progress, but you should take care that the change to progress is from a background queue during download(the same as progressBlock). If you want to using KVO and update the UI, make sure to dispatch on the main queue. And it's recommend to use some KVO libs like KVOController because it's more safe and easy to use.
  * @note The getter will create a progress instance if the value is nil. But by default, we don't create one. If you need to use Key-Value Observing, you must trigger the getter or set a custom progress instance before the loading start. The default value is nil.
  * @note Note that because of the limitations of categories this property can get out of sync if you update the progress directly.
+ * @warning This property should only be used for a single-state view, like `UIImageView` without the highlighted state. For a stateful view like `UIButton` (one view can have multiple images loading), use `sd_imageLoadStateForKey:` instead. See `UIView+WebCacheState.h` for more information.
  */
 @property (nonatomic, strong, null_resettable) NSProgress *sd_imageProgress;
 
@@ -83,6 +92,9 @@ typedef void(^SDSetImageBlock)(UIImage * _Nullable image, NSData * _Nullable ima
 
 /**
  * Cancel the current image load
+ * This simply translates to `[self sd_cancelImageLoadOperationWithKey:self.sd_latestOperationKey]` from v5.18.0
+ * 
+ * @warning This method should only be used for a single-state view, like `UIImageView` without the highlighted state. For a stateful view like `UIButton` (one view can have multiple images loading), use `sd_cancelImageLoadOperationWithKey:` instead. See `UIView+WebCacheOperation.h` for more information.
  */
 - (void)sd_cancelCurrentImageLoad;
 
@@ -93,6 +105,7 @@ typedef void(^SDSetImageBlock)(UIImage * _Nullable image, NSData * _Nullable ima
 /**
  The image transition when image load finished. See `SDWebImageTransition`.
  If you specify nil, do not do transition. Defaults to nil.
+ @warning This property should only be used for a single-state view, like `UIImageView` without the highlighted state. For a stateful view like `UIButton` (one view can have multiple images loading), write your own implementation in `setImageBlock:` and check the current stateful view's state to render the UI.
  */
 @property (nonatomic, strong, nullable) SDWebImageTransition *sd_imageTransition;
 
@@ -102,6 +115,7 @@ typedef void(^SDSetImageBlock)(UIImage * _Nullable image, NSData * _Nullable ima
  The image indicator during the image loading. If you do not need indicator, specify nil. Defaults to nil
  The setter will remove the old indicator view and add new indicator view to current view's subview.
  @note Because this is UI related, you should access only from the main queue.
+ @warning This property should only be used for a single-state view, like `UIImageView` without the highlighted state. For a stateful view like `UIButton` (one view can have multiple images loading), write your own implementation in `setImageBlock:` and check the current stateful view's state to render the UI.
  */
 @property (nonatomic, strong, nullable) id<SDWebImageIndicator> sd_imageIndicator;
 

+ 28 - 14
Pods/SDWebImage/SDWebImage/Core/UIView+WebCache.m

@@ -18,14 +18,6 @@ const int64_t SDWebImageProgressUnitCountUnknown = 1LL;
 
 @implementation UIView (WebCache)
 
-- (nullable NSURL *)sd_imageURL {
-    return objc_getAssociatedObject(self, @selector(sd_imageURL));
-}
-
-- (void)setSd_imageURL:(NSURL * _Nullable)sd_imageURL {
-    objc_setAssociatedObject(self, @selector(sd_imageURL), sd_imageURL, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
-}
-
 - (nullable NSString *)sd_latestOperationKey {
     return objc_getAssociatedObject(self, @selector(sd_latestOperationKey));
 }
@@ -34,8 +26,15 @@ const int64_t SDWebImageProgressUnitCountUnknown = 1LL;
     objc_setAssociatedObject(self, @selector(sd_latestOperationKey), sd_latestOperationKey, OBJC_ASSOCIATION_COPY_NONATOMIC);
 }
 
+#pragma mark - State
+
+- (NSURL *)sd_imageURL {
+    return [self sd_imageLoadStateForKey:self.sd_latestOperationKey].url;
+}
+
 - (NSProgress *)sd_imageProgress {
-    NSProgress *progress = objc_getAssociatedObject(self, @selector(sd_imageProgress));
+    SDWebImageLoadState *loadState = [self sd_imageLoadStateForKey:self.sd_latestOperationKey];
+    NSProgress *progress = loadState.progress;
     if (!progress) {
         progress = [[NSProgress alloc] initWithParent:nil userInfo:nil];
         self.sd_imageProgress = progress;
@@ -44,7 +43,15 @@ const int64_t SDWebImageProgressUnitCountUnknown = 1LL;
 }
 
 - (void)setSd_imageProgress:(NSProgress *)sd_imageProgress {
-    objc_setAssociatedObject(self, @selector(sd_imageProgress), sd_imageProgress, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
+    if (!sd_imageProgress) {
+        return;
+    }
+    SDWebImageLoadState *loadState = [self sd_imageLoadStateForKey:self.sd_latestOperationKey];
+    if (!loadState) {
+        loadState = [SDWebImageLoadState new];
+    }
+    loadState.progress = sd_imageProgress;
+    [self sd_setImageLoadState:loadState forKey:self.sd_latestOperationKey];
 }
 
 - (nullable id<SDWebImageOperation>)sd_internalSetImageWithURL:(nullable NSURL *)url
@@ -69,8 +76,16 @@ const int64_t SDWebImageProgressUnitCountUnknown = 1LL;
         context = [mutableContext copy];
     }
     self.sd_latestOperationKey = validOperationKey;
-    [self sd_cancelImageLoadOperationWithKey:validOperationKey];
-    self.sd_imageURL = url;
+    if (!(SD_OPTIONS_CONTAINS(options, SDWebImageAvoidAutoCancelImage))) {
+        // cancel previous loading for the same set-image operation key by default
+        [self sd_cancelImageLoadOperationWithKey:validOperationKey];
+    }
+    SDWebImageLoadState *loadState = [self sd_imageLoadStateForKey:validOperationKey];
+    if (!loadState) {
+        loadState = [SDWebImageLoadState new];
+    }
+    loadState.url = url;
+    [self sd_setImageLoadState:loadState forKey:validOperationKey];
     
     SDWebImageManager *manager = context[SDWebImageContextCustomManager];
     if (!manager) {
@@ -103,7 +118,7 @@ const int64_t SDWebImageProgressUnitCountUnknown = 1LL;
     
     if (url) {
         // reset the progress
-        NSProgress *imageProgress = objc_getAssociatedObject(self, @selector(sd_imageProgress));
+        NSProgress *imageProgress = loadState.progress;
         if (imageProgress) {
             imageProgress.totalUnitCount = 0;
             imageProgress.completedUnitCount = 0;
@@ -242,7 +257,6 @@ const int64_t SDWebImageProgressUnitCountUnknown = 1LL;
 
 - (void)sd_cancelCurrentImageLoad {
     [self sd_cancelImageLoadOperationWithKey:self.sd_latestOperationKey];
-    self.sd_latestOperationKey = nil;
 }
 
 - (void)sd_setImage:(UIImage *)image imageData:(NSData *)imageData basedOnClassOrViaCustomSetImageBlock:(SDSetImageBlock)setImageBlock cacheType:(SDImageCacheType)cacheType imageURL:(NSURL *)imageURL {
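
Two behavioral changes sit in this hunk: the per-key load state replaces the old associated-object URL/progress storage, and the automatic cancel of the previous load can now be skipped with the `SDWebImageAvoidAutoCancelImage` option referenced above. An illustrative call that opts out of the auto-cancel (`imageView` and `url` are assumptions):

    #import <SDWebImage/SDWebImage.h>

    // Keep a previous in-flight request for the same operation key alive instead of cancelling it.
    [imageView sd_setImageWithURL:url
                 placeholderImage:nil
                          options:SDWebImageAvoidAutoCancelImage
                        completed:nil];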

+ 0 - 1
Pods/SDWebImage/SDWebImage/Core/UIView+WebCacheOperation.m

@@ -40,7 +40,6 @@ typedef NSMapTable<NSString *, id<SDWebImageOperation>> SDOperationsDictionary;
 
 - (void)sd_setImageLoadOperation:(nullable id<SDWebImageOperation>)operation forKey:(nullable NSString *)key {
     if (key) {
-        [self sd_cancelImageLoadOperationWithKey:key];
         if (operation) {
             SDOperationsDictionary *operationDictionary = [self sd_operationDictionary];
             @synchronized (self) {

+ 62 - 0
Pods/SDWebImage/SDWebImage/Core/UIView+WebCacheState.h

@@ -0,0 +1,62 @@
+/*
+ * This file is part of the SDWebImage package.
+ * (c) Olivier Poitrey <rs@dailymotion.com>
+ *
+ * For the full copyright and license information, please view the LICENSE
+ * file that was distributed with this source code.
+ */
+
+#import <Foundation/Foundation.h>
+#import "SDWebImageCompat.h"
+
+/**
+ A loading state used to manage view categories which contain multiple states, like UIImageView.image and UIImageView.highlightedImage
+ @code
+ SDWebImageLoadState *loadState = [self sd_imageLoadStateForKey:@keypath(self, highlightedImage)];
+ NSProgress *highlightedImageProgress = loadState.progress;
+ @endcode
+ */
+@interface SDWebImageLoadState : NSObject
+
+/**
+ Image loading URL
+ */
+@property (nonatomic, strong, nullable) NSURL *url;
+/**
+ Image loading progress. The unit count is the received size and expected size of the download.
+ */
+@property (nonatomic, strong, nullable) NSProgress *progress;
+
+@end
+
+/**
+ These methods are used for WebCache views which have multiple states for image loading, for example `UIButton` or `UIImageView.highlightedImage`
+ They maintain a state container per operation, making it possible to control and check the state of each image loading operation.
+ @note For developers who want to add SDWebImage view category support for their own stateful class, learn more on the Wiki.
+ */
+@interface UIView (WebCacheState)
+
+/**
+ Get the image loading state container for the specified operation key
+
+ @param key The key identifying the operation
+ @return The image loading state container
+ */
+- (nullable SDWebImageLoadState *)sd_imageLoadStateForKey:(nullable NSString *)key;
+
+/**
+ Set the image loading state container for the specified operation key
+
+ @param state The image loading state container
+ @param key The key identifying the operation
+ */
+- (void)sd_setImageLoadState:(nullable SDWebImageLoadState *)state forKey:(nullable NSString *)key;
+
+/**
+ Remove the image loading state container for the specified operation key
+
+ @param key The key identifying the operation
+ */
+- (void)sd_removeImageLoadStateForKey:(nullable NSString *)key;
+
+@end
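
For third-party stateful views, the same container can be reused with a custom operation key. A minimal sketch, assuming a hypothetical category and key name (not part of the diff):

    #import "UIView+WebCacheState.h"

    @interface UIImageView (MySelectedWebCache)
    - (void)my_recordSelectedImageURL:(nullable NSURL *)url;
    - (nullable NSURL *)my_selectedImageURL;
    @end

    @implementation UIImageView (MySelectedWebCache)

    - (void)my_recordSelectedImageURL:(NSURL *)url {
        // Record the URL under a dedicated per-state key.
        SDWebImageLoadState *state = [self sd_imageLoadStateForKey:@"MySelectedImageOperation"] ?: [SDWebImageLoadState new];
        state.url = url;
        [self sd_setImageLoadState:state forKey:@"MySelectedImageOperation"];
    }

    - (NSURL *)my_selectedImageURL {
        return [self sd_imageLoadStateForKey:@"MySelectedImageOperation"].url;
    }

    @end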

+ 56 - 0
Pods/SDWebImage/SDWebImage/Core/UIView+WebCacheState.m

@@ -0,0 +1,56 @@
+/*
+ * This file is part of the SDWebImage package.
+ * (c) Olivier Poitrey <rs@dailymotion.com>
+ *
+ * For the full copyright and license information, please view the LICENSE
+ * file that was distributed with this source code.
+ */
+
+#import "UIView+WebCacheState.h"
+#import "objc/runtime.h"
+
+typedef NSMutableDictionary<NSString *, SDWebImageLoadState *> SDStatesDictionary;
+
+@implementation SDWebImageLoadState
+
+@end
+
+@implementation UIView (WebCacheState)
+
+- (SDStatesDictionary *)sd_imageLoadStateDictionary {
+    SDStatesDictionary *states = objc_getAssociatedObject(self, @selector(sd_imageLoadStateDictionary));
+    if (!states) {
+        states = [NSMutableDictionary dictionary];
+        objc_setAssociatedObject(self, @selector(sd_imageLoadStateDictionary), states, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
+    }
+    return states;
+}
+
+- (SDWebImageLoadState *)sd_imageLoadStateForKey:(NSString *)key {
+    if (!key) {
+        key = NSStringFromClass(self.class);
+    }
+    @synchronized(self) {
+        return [self.sd_imageLoadStateDictionary objectForKey:key];
+    }
+}
+
+- (void)sd_setImageLoadState:(SDWebImageLoadState *)state forKey:(NSString *)key {
+    if (!key) {
+        key = NSStringFromClass(self.class);
+    }
+    @synchronized(self) {
+        self.sd_imageLoadStateDictionary[key] = state;
+    }
+}
+
+- (void)sd_removeImageLoadStateForKey:(NSString *)key {
+    if (!key) {
+        key = NSStringFromClass(self.class);
+    }
+    @synchronized(self) {
+        self.sd_imageLoadStateDictionary[key] = nil;
+    }
+}
+
+@end

+ 1 - 0
Pods/SDWebImage/SDWebImage/Private/SDDeviceHelper.m

@@ -8,6 +8,7 @@
 
 #import "SDDeviceHelper.h"
 #import <mach/mach.h>
+#import <sys/sysctl.h>
 
 @implementation SDDeviceHelper
 

+ 8 - 4
Pods/SDWebImage/SDWebImage/Private/SDDisplayLink.m

@@ -45,6 +45,7 @@ static CVReturn DisplayLinkCallback(CVDisplayLinkRef displayLink, const CVTimeSt
 - (void)dealloc {
 #if SD_MAC
     if (_displayLink) {
+        CVDisplayLinkStop(_displayLink);
         CVDisplayLinkRelease(_displayLink);
         _displayLink = NULL;
     }
@@ -62,14 +63,15 @@ static CVReturn DisplayLinkCallback(CVDisplayLinkRef displayLink, const CVTimeSt
     if (self) {
         _target = target;
         _selector = sel;
+        // CA/CV/NSTimer retain their target, so we break the retain cycle with a weak proxy
+        SDWeakProxy *weakProxy = [SDWeakProxy proxyWithTarget:self];
 #if SD_MAC
         CVDisplayLinkCreateWithActiveCGDisplays(&_displayLink);
-        CVDisplayLinkSetOutputCallback(_displayLink, DisplayLinkCallback, (__bridge void *)self);
+        // Simulate a retain of the target; the target is a weak proxy to self
+        CVDisplayLinkSetOutputCallback(_displayLink, DisplayLinkCallback, (__bridge_retained void *)weakProxy);
 #elif SD_IOS || SD_TV
-        SDWeakProxy *weakProxy = [SDWeakProxy proxyWithTarget:self];
         _displayLink = [CADisplayLink displayLinkWithTarget:weakProxy selector:@selector(displayLinkDidRefresh:)];
 #else
-        SDWeakProxy *weakProxy = [SDWeakProxy proxyWithTarget:self];
         _displayLink = [NSTimer timerWithTimeInterval:kSDDisplayLinkInterval target:weakProxy selector:@selector(displayLinkDidRefresh:) userInfo:nil repeats:YES];
 #endif
     }
@@ -195,7 +197,7 @@ static CVReturn DisplayLinkCallback(CVDisplayLinkRef displayLink, const CVTimeSt
     self.displayLink.paused = NO;
 #else
     if (self.displayLink.isValid) {
-        [self.displayLink fire];
+        // Do nothing
     } else {
         SDWeakProxy *weakProxy = [SDWeakProxy proxyWithTarget:self];
         self.displayLink = [NSTimer timerWithTimeInterval:kSDDisplayLinkInterval target:weakProxy selector:@selector(displayLinkDidRefresh:) userInfo:nil repeats:YES];
@@ -238,7 +240,9 @@ static CVReturn DisplayLinkCallback(CVDisplayLinkRef displayLink, const CVTimeSt
 #if SD_MAC
 static CVReturn DisplayLinkCallback(CVDisplayLinkRef displayLink, const CVTimeStamp *inNow, const CVTimeStamp *inOutputTime, CVOptionFlags flagsIn, CVOptionFlags *flagsOut, void *displayLinkContext) {
     // CVDisplayLink callback is not on main queue
+    // The context is actually the `SDWeakProxy`, not `SDDisplayLink`
     SDDisplayLink *object = (__bridge SDDisplayLink *)displayLinkContext;
+    if (!object) return kCVReturnSuccess;
     // CVDisplayLink does not use runloop, but we can provide similar behavior for modes
     // May use `default` runloop to avoid extra callback when in `eventTracking` (mouse drag, scroll) or `modalPanel` (modal panel)
     NSString *runloopMode = object.runloopMode;
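
The macOS path now hands `CVDisplayLinkSetOutputCallback` a retained `SDWeakProxy` rather than `self`, stops the link in `dealloc`, and guards the callback against a nil target, mirroring what the CADisplayLink/NSTimer branches already did. A generic sketch of the weak-proxy idea (the class name is an assumption; SDWebImage ships its own `SDWeakProxy`):

    #import <Foundation/Foundation.h>

    // The timer/display link retains the proxy; the proxy only weakly references
    // the real target, so the target can deallocate and tear the timer down.
    @interface MyWeakProxy : NSProxy
    @property (nonatomic, weak, readonly, nullable) id target;
    + (instancetype)proxyWithTarget:(id)target;
    @end

    @implementation MyWeakProxy
    + (instancetype)proxyWithTarget:(id)target {
        MyWeakProxy *proxy = [self alloc]; // NSProxy has no -init
        proxy->_target = target;
        return proxy;
    }
    - (void)forwardInvocation:(NSInvocation *)invocation {
        [invocation invokeWithTarget:self.target]; // messaging nil is a no-op
    }
    - (NSMethodSignature *)methodSignatureForSelector:(SEL)sel {
        // Fall back to a trivial signature once the target is gone.
        return [self.target methodSignatureForSelector:sel] ?: [NSMethodSignature signatureWithObjCTypes:"v@:"];
    }
    @end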

+ 3 - 1
Pods/SDWebImage/SDWebImage/Private/SDImageAssetManager.m

@@ -13,7 +13,9 @@ static NSArray *SDBundlePreferredScales(void) {
     static NSArray *scales;
     static dispatch_once_t onceToken;
     dispatch_once(&onceToken, ^{
-#if SD_WATCH
+#if SD_VISION
+        CGFloat screenScale = UITraitCollection.currentTraitCollection.displayScale;
+#elif SD_WATCH
         CGFloat screenScale = [WKInterfaceDevice currentDevice].screenScale;
 #elif SD_UIKIT
         CGFloat screenScale = [UIScreen mainScreen].scale;

+ 40 - 0
Pods/SDWebImage/SDWebImage/Private/SDImageFramePool.h

@@ -0,0 +1,40 @@
+/*
+* This file is part of the SDWebImage package.
+* (c) Olivier Poitrey <rs@dailymotion.com>
+*
+* For the full copyright and license information, please view the LICENSE
+* file that was distributed with this source code.
+*/
+
+#import <Foundation/Foundation.h>
+#import "SDWebImageCompat.h"
+#import "SDImageCoder.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+/// A per-provider (provider meaning the AnimatedImage object) frame pool; all players that use the same provider share the same frame buffer
+@interface SDImageFramePool : NSObject
+
+/// Register and return back a frame pool, also increase reference count
++ (instancetype)registerProvider:(id<SDAnimatedImageProvider>)provider;
+/// Unregister a frame pool, also decrease reference count, if zero dealloc the frame pool
++ (void)unregisterProvider:(id<SDAnimatedImageProvider>)provider;
+
+/// Prefetch the frame at the given index; the caller queries `frameAtIndex:` to check whether it has finished.
+- (void)prefetchFrameAtIndex:(NSUInteger)index;
+
+/// Control the max buffer count for current frame pool, used for RAM/CPU balance, default unlimited
+@property (nonatomic, assign) NSUInteger maxBufferCount;
+/// Control the max concurrent fetch queue operation count, used for CPU balance, default 1
+@property (nonatomic, assign) NSUInteger maxConcurrentCount;
+
+// Frame Operations
+@property (nonatomic, readonly) NSUInteger currentFrameCount;
+- (nullable UIImage *)frameAtIndex:(NSUInteger)index;
+- (void)setFrame:(nullable UIImage *)frame atIndex:(NSUInteger)index;
+- (void)removeFrameAtIndex:(NSUInteger)index;
+- (void)removeAllFrames;
+
+NS_ASSUME_NONNULL_END
+
+@end
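
A sketch of how a player object is expected to share a pool per animated-image provider, matching the register/unregister reference counting described above (the player class and buffer limit are assumptions):

    #import "SDImageFramePool.h"

    @interface MyFramePlayer : NSObject
    - (instancetype)initWithProvider:(id<SDAnimatedImageProvider>)provider;
    @end

    @implementation MyFramePlayer {
        id<SDAnimatedImageProvider> _provider;
        SDImageFramePool *_framePool;
    }

    - (instancetype)initWithProvider:(id<SDAnimatedImageProvider>)provider {
        if (self = [super init]) {
            _provider = provider;
            _framePool = [SDImageFramePool registerProvider:provider]; // shared per provider, reference counted
            _framePool.maxBufferCount = 16;                            // RAM/CPU trade-off
            [_framePool prefetchFrameAtIndex:0];
        }
        return self;
    }

    - (void)dealloc {
        [SDImageFramePool unregisterProvider:_provider]; // the pool is released when the last player unregisters
    }

    @end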

+ 164 - 0
Pods/SDWebImage/SDWebImage/Private/SDImageFramePool.m

@@ -0,0 +1,164 @@
+/*
+* This file is part of the SDWebImage package.
+* (c) Olivier Poitrey <rs@dailymotion.com>
+*
+* For the full copyright and license information, please view the LICENSE
+* file that was distributed with this source code.
+*/
+
+#import "SDImageFramePool.h"
+#import "SDInternalMacros.h"
+#import "objc/runtime.h"
+
+@interface SDImageFramePool ()
+
+@property (class, readonly) NSMapTable *providerFramePoolMap;
+
+@property (weak) id<SDAnimatedImageProvider> provider;
+@property (atomic) NSUInteger registerCount;
+
+@property (nonatomic, strong) NSMutableDictionary<NSNumber *, UIImage *> *frameBuffer;
+@property (nonatomic, strong) NSOperationQueue *fetchQueue;
+
+@end
+
+// Lock to ensure atomic behavior
+SD_LOCK_DECLARE_STATIC(_providerFramePoolMapLock);
+
+@implementation SDImageFramePool
+
++ (NSMapTable *)providerFramePoolMap {
+    static NSMapTable *providerFramePoolMap;
+    static dispatch_once_t onceToken;
+    dispatch_once(&onceToken, ^{
+        providerFramePoolMap = [NSMapTable mapTableWithKeyOptions:NSPointerFunctionsStrongMemory | NSPointerFunctionsObjectPointerPersonality valueOptions:NSPointerFunctionsStrongMemory | NSPointerFunctionsObjectPointerPersonality];
+    });
+    return providerFramePoolMap;
+}
+
+#pragma mark - Life Cycle
+- (instancetype)init {
+    self = [super init];
+    if (self) {
+        _frameBuffer = [NSMutableDictionary dictionary];
+        _fetchQueue = [[NSOperationQueue alloc] init];
+        _fetchQueue.maxConcurrentOperationCount = 1;
+        _fetchQueue.name = @"com.hackemist.SDImageFramePool.fetchQueue";
+#if SD_UIKIT
+        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didReceiveMemoryWarning:) name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
+#endif
+    }
+    return self;
+}
+
+- (void)dealloc {
+#if SD_UIKIT
+    [[NSNotificationCenter defaultCenter] removeObserver:self name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
+#endif
+}
+
+- (void)didReceiveMemoryWarning:(NSNotification *)notification {
+    [self removeAllFrames];
+}
+
++ (void)initialize {
+    // Lock to ensure atomic behavior
+    SD_LOCK_INIT(_providerFramePoolMapLock);
+}
+
++ (instancetype)registerProvider:(id<SDAnimatedImageProvider>)provider {
+    // Lock to ensure atomic behavior
+    SD_LOCK(_providerFramePoolMapLock);
+    SDImageFramePool *framePool = [self.providerFramePoolMap objectForKey:provider];
+    if (!framePool) {
+        framePool = [[SDImageFramePool alloc] init];
+        framePool.provider = provider;
+        [self.providerFramePoolMap setObject:framePool forKey:provider];
+    }
+    framePool.registerCount += 1;
+    SD_UNLOCK(_providerFramePoolMapLock);
+    return framePool;
+}
+
++ (void)unregisterProvider:(id<SDAnimatedImageProvider>)provider {
+    // Lock to ensure atomic behavior
+    SD_LOCK(_providerFramePoolMapLock);
+    SDImageFramePool *framePool = [self.providerFramePoolMap objectForKey:provider];
+    if (!framePool) {
+        SD_UNLOCK(_providerFramePoolMapLock);
+        return;
+    }
+    framePool.registerCount -= 1;
+    if (framePool.registerCount == 0) {
+        [self.providerFramePoolMap removeObjectForKey:provider];
+    }
+    SD_UNLOCK(_providerFramePoolMapLock);
+}
+
+- (void)prefetchFrameAtIndex:(NSUInteger)index {
+    @synchronized (self) {
+        NSUInteger frameCount = self.frameBuffer.count;
+        if (frameCount > self.maxBufferCount) {
+            // Remove neighboring frame buffer entries if needed
+            // TODO: use LRU or a better algorithm to decide which frames to clear
+            self.frameBuffer[@(index - 1)] = nil;
+            self.frameBuffer[@(index + 1)] = nil;
+        }
+    }
+    
+    if (self.fetchQueue.operationCount == 0) {
+        // Prefetch next frame in background queue
+        id<SDAnimatedImageProvider> animatedProvider = self.provider;
+        @weakify(self);
+        NSOperation *operation = [NSBlockOperation blockOperationWithBlock:^{
+            @strongify(self);
+            if (!self) {
+                return;
+            }
+            UIImage *frame = [animatedProvider animatedImageFrameAtIndex:index];
+            
+            [self setFrame:frame atIndex:index];
+        }];
+        [self.fetchQueue addOperation:operation];
+    }
+}
+
+- (void)setMaxConcurrentCount:(NSUInteger)maxConcurrentCount {
+    self.fetchQueue.maxConcurrentOperationCount = maxConcurrentCount;
+}
+
+- (NSUInteger)currentFrameCount {
+    NSUInteger frameCount = 0;
+    @synchronized (self) {
+        frameCount = self.frameBuffer.count;
+    }
+    return frameCount;
+}
+
+- (void)setFrame:(UIImage *)frame atIndex:(NSUInteger)index {
+    @synchronized (self) {
+        self.frameBuffer[@(index)] = frame;
+    }
+}
+
+- (UIImage *)frameAtIndex:(NSUInteger)index {
+    UIImage *frame;
+    @synchronized (self) {
+        frame = self.frameBuffer[@(index)];
+    }
+    return frame;
+}
+
+- (void)removeFrameAtIndex:(NSUInteger)index {
+    @synchronized (self) {
+        self.frameBuffer[@(index)] = nil;
+    }
+}
+
+- (void)removeAllFrames {
+    @synchronized (self) {
+        [self.frameBuffer removeAllObjects];
+    }
+}
+
+@end
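
To make the reference counting above concrete, a small sketch assuming `provider` is any `SDAnimatedImageProvider`: registering the same provider twice yields the identical pool object, and the pool is only torn down after the matching number of unregistrations.

```objective-c
SDImageFramePool *poolA = [SDImageFramePool registerProvider:provider];
SDImageFramePool *poolB = [SDImageFramePool registerProvider:provider];
NSAssert(poolA == poolB, @"Players that share a provider share one frame pool");

[SDImageFramePool unregisterProvider:provider];  // count 2 -> 1, pool stays alive
[SDImageFramePool unregisterProvider:provider];  // count 1 -> 0, pool removed from the map
```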

+ 65 - 0
Pods/SDWebImage/SDWebImage/Private/SDInternalMacros.h

@@ -120,3 +120,68 @@ extern "C" {
 #if defined(__cplusplus)
 }
 #endif
+
+/**
+ * \@keypath allows compile-time verification of key paths. Given a real object
+ * receiver and key path:
+ *
+ * @code
+
+NSString *UTF8StringPath = @keypath(str.lowercaseString.UTF8String);
+// => @"lowercaseString.UTF8String"
+
+NSString *versionPath = @keypath(NSObject, version);
+// => @"version"
+
+NSString *lowercaseStringPath = @keypath(NSString.new, lowercaseString);
+// => @"lowercaseString"
+
+ * @endcode
+ *
+ * ... the macro returns an \c NSString containing all but the first path
+ * component or argument (e.g., @"lowercaseString.UTF8String", @"version").
+ *
+ * In addition to simply creating a key path, this macro ensures that the key
+ * path is valid at compile-time (causing a syntax error if not), and supports
+ * refactoring, such that changing the name of the property will also update any
+ * uses of \@keypath.
+ */
+#define keypath(...) \
+    _Pragma("clang diagnostic push") \
+    _Pragma("clang diagnostic ignored \"-Warc-repeated-use-of-weak\"") \
+    (NO).boolValue ? ((NSString * _Nonnull)nil) : ((NSString * _Nonnull)@(cStringKeypath(__VA_ARGS__))) \
+    _Pragma("clang diagnostic pop") \
+
+#define cStringKeypath(...) \
+    metamacro_if_eq(1, metamacro_argcount(__VA_ARGS__))(keypath1(__VA_ARGS__))(keypath2(__VA_ARGS__))
+
+#define keypath1(PATH) \
+    (((void)(NO && ((void)PATH, NO)), \
+    ({ char *__extobjckeypath__ = strchr(# PATH, '.'); NSCAssert(__extobjckeypath__, @"Provided key path is invalid."); __extobjckeypath__ + 1; })))
+
+#define keypath2(OBJ, PATH) \
+    (((void)(NO && ((void)OBJ.PATH, NO)), # PATH))
+
+/**
+ * \@collectionKeypath allows compile-time verification of key paths across collections NSArray/NSSet etc. Given a real object
+ * receiver, collection object receiver and related keypaths:
+ *
+ * @code
+ 
+ NSString *employeesFirstNamePath = @collectionKeypath(department.employees, Employee.new, firstName)
+ // => @"employees.firstName"
+ 
+ NSString *employeesFirstNamePath = @collectionKeypath(Department.new, employees, Employee.new, firstName)
+ // => @"employees.firstName"
+
+ * @endcode
+ *
+ */
+#define collectionKeypath(...) \
+    metamacro_if_eq(3, metamacro_argcount(__VA_ARGS__))(collectionKeypath3(__VA_ARGS__))(collectionKeypath4(__VA_ARGS__))
+
+#define collectionKeypath3(PATH, COLLECTION_OBJECT, COLLECTION_PATH) \
+    (YES).boolValue ? (NSString * _Nonnull)@((const char * _Nonnull)[[NSString stringWithFormat:@"%s.%s", cStringKeypath(PATH), cStringKeypath(COLLECTION_OBJECT, COLLECTION_PATH)] UTF8String]) : (NSString * _Nonnull)nil
+
+#define collectionKeypath4(OBJ, PATH, COLLECTION_OBJECT, COLLECTION_PATH) \
+    (YES).boolValue ? (NSString * _Nonnull)@((const char * _Nonnull)[[NSString stringWithFormat:@"%s.%s", cStringKeypath(OBJ, PATH), cStringKeypath(COLLECTION_OBJECT, COLLECTION_PATH)] UTF8String]) : (NSString * _Nonnull)nil
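
As an illustration of the compile-time check described above, a hypothetical KVO registration; `self` is assumed to be some observer object, and `operationCount` is a real KVO-compliant `NSOperationQueue` property:

```objective-c
NSOperationQueue *queue = [[NSOperationQueue alloc] init];
// Expands to @"operationCount"; a typo in the property name becomes a compile error
// instead of a silent runtime miss.
NSString *keyPath = @keypath(queue, operationCount);
[queue addObserver:self
        forKeyPath:keyPath
           options:NSKeyValueObservingOptionNew
           context:NULL];
```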

+ 1 - 0
Pods/SDWebImage/WebImage/SDWebImage.h

@@ -40,6 +40,7 @@ FOUNDATION_EXPORT const unsigned char SDWebImageVersionString[];
 #import <SDWebImage/UIButton+WebCache.h>
 #import <SDWebImage/SDWebImagePrefetcher.h>
 #import <SDWebImage/UIView+WebCacheOperation.h>
+#import <SDWebImage/UIView+WebCacheState.h>
 #import <SDWebImage/UIImage+Metadata.h>
 #import <SDWebImage/UIImage+MultiFormat.h>
 #import <SDWebImage/UIImage+MemoryCacheCost.h>

+ 22 - 0
Pods/SDWebImageWebPCoder/README.md

@@ -168,6 +168,28 @@ let thumbnailSize = CGSize(width: 300, height: 300)
 let image = SDImageWebPCoder.shared.decodedImage(with: data, options: [.decodeThumbnailPixelSize: thumbnailSize])
 ```
 
+### Decoding with limit bytes (0.12.0+)
+
++ Objective-C
+
+```objective-c
+// WebP image decoding with a decoded-buffer byte limit
+NSData *webpData;
+NSUInteger limitBytes = 1024 * 1024; // 1MB
+UIImage *image = [[SDImageWebPCoder sharedCoder] decodedImageWithData:webpData options:@{SDImageCoderDecodeScaleDownLimitBytes : @(limitBytes)}];
+// The decoded pixel buffer is guaranteed to be less than 1MB in RAM (scaled down if necessary, otherwise full size), suitable for large images
+```
+
++ Swift
+
+```swift
+// WebP image decoding with a decoded-buffer byte limit
+let webpData: Data
+let limitBytes = 1024 * 1024 // 1MB
+let image = SDImageWebPCoder.shared.decodedImage(with: webpData, options: [.decodeScaleDownLimitBytes: limitBytes])
+// The decoded pixel buffer is guaranteed to be less than 1MB in RAM (scaled down if necessary, otherwise full size), suitable for large images
+```
+
 ### Encoding
 
 + Objective-c
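
For intuition about the limit-bytes decoding shown above, a rough sketch of the arithmetic, assuming 32-bit RGBA output as the coder's size estimate uses:

```objective-c
NSUInteger limitBytes = 1024 * 1024;                // 1MB budget for the decoded buffer
NSUInteger bytesPerPixel = 4;                       // RGBA8888
NSUInteger maxPixels = limitBytes / bytesPerPixel;  // 262,144 pixels for a single frame
// sqrt(262144) = 512, so a square single-frame image is decoded at roughly 512x512 at most;
// an animated image splits the same budget across its frame count.
```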

+ 174 - 23
Pods/SDWebImageWebPCoder/SDWebImageWebPCoder/Classes/SDImageWebPCoder.m

@@ -71,8 +71,8 @@ else OSSpinLockUnlock(&lock##_deprecated);
 /// Used for animated WebP, which needs a canvas for decoding (rendering) and may apply a scale transform for thumbnail decoding (avoiding a post-rescale using vImage)
 /// See more in #73
 static inline CGContextRef _Nullable CreateWebPCanvas(BOOL hasAlpha, CGSize canvasSize, CGSize thumbnailSize, BOOL preserveAspectRatio) {
-    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
-    bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
+    // From SDWebImage v5.17.0, use runtime detection of the bitmap info instead of hardcoding it.
+    CGBitmapInfo bitmapInfo = [SDImageCoderHelper preferredPixelFormat:hasAlpha].bitmapInfo;
     // Check whether we need to use thumbnail
     CGSize scaledSize = [SDImageCoderHelper scaledSizeWithImageSize:CGSizeMake(canvasSize.width, canvasSize.height) scaleSize:thumbnailSize preserveAspectRatio:preserveAspectRatio shouldScaleUp:NO];
     CGContextRef canvas = CGBitmapContextCreate(NULL, scaledSize.width, scaledSize.height, 8, 0, [SDImageCoderHelper colorSpaceGetDeviceRGB], bitmapInfo);
@@ -88,6 +88,89 @@ static inline CGContextRef _Nullable CreateWebPCanvas(BOOL hasAlpha, CGSize canv
     return canvas;
 }
 
+WEBP_CSP_MODE ConvertCSPMode(CGBitmapInfo bitmapInfo) {
+    // Get alpha info, byteOrder info
+    CGImageAlphaInfo alphaInfo = bitmapInfo & kCGBitmapAlphaInfoMask;
+    CGBitmapInfo byteOrderInfo = bitmapInfo & kCGBitmapByteOrderMask;
+    BOOL byteOrderNormal = NO;
+    switch (byteOrderInfo) {
+        case kCGBitmapByteOrderDefault: {
+            byteOrderNormal = YES;
+        } break;
+        case kCGBitmapByteOrder32Little: {
+        } break;
+        case kCGBitmapByteOrder32Big: {
+            byteOrderNormal = YES;
+        } break;
+        default: break;
+    }
+    switch (alphaInfo) {
+        case kCGImageAlphaPremultipliedFirst: {
+            if (byteOrderNormal) {
+                // ARGB8888, premultiplied
+                return MODE_Argb;
+            } else {
+                // BGRA8888, premultiplied
+                return MODE_bgrA;
+            }
+        }
+            break;
+        case kCGImageAlphaPremultipliedLast: {
+            if (byteOrderNormal) {
+                // RGBA8888, premultiplied
+                return MODE_rgbA;
+            } else {
+                // ABGR8888, premultiplied
+                // Unsupported!
+                return MODE_LAST;
+            }
+        }
+            break;
+        case kCGImageAlphaNone: {
+            if (byteOrderNormal) {
+                // RGB
+                return MODE_RGB;
+            } else {
+                // BGR
+                return MODE_BGR;
+            }
+        }
+            break;
+        case kCGImageAlphaLast:
+        case kCGImageAlphaNoneSkipLast: {
+            if (byteOrderNormal) {
+                // RGBA or RGBX
+                return MODE_RGBA;
+            } else {
+                // ABGR or XBGR
+                // Unsupported!
+                return MODE_LAST;
+            }
+        }
+            break;
+        case kCGImageAlphaFirst:
+        case kCGImageAlphaNoneSkipFirst: {
+            if (byteOrderNormal) {
+                // ARGB or XRGB
+                return MODE_ARGB;
+            } else {
+                // BGRA or BGRX
+                return MODE_BGRA;
+            }
+        }
+            break;
+        case kCGImageAlphaOnly: {
+            // A
+            // Unsupported
+            return MODE_LAST;
+        }
+            break;
+        default:
+            break;
+    }
+    return MODE_LAST;
+}
+
 @interface SDWebPCoderFrame : NSObject
 
 @property (nonatomic, assign) NSUInteger index; // Frame index (zero based)
@@ -126,6 +209,7 @@ static inline CGContextRef _Nullable CreateWebPCanvas(BOOL hasAlpha, CGSize canv
     NSUInteger _currentBlendIndex;
     BOOL _preserveAspectRatio;
     CGSize _thumbnailSize;
+    BOOL _limitBytes;
 }
 
 - (void)dealloc {
@@ -218,6 +302,24 @@ static inline CGContextRef _Nullable CreateWebPCanvas(BOOL hasAlpha, CGSize canv
     CGColorSpaceRef colorSpace = [self sd_createColorSpaceWithDemuxer:demuxer];
     int canvasWidth = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_WIDTH);
     int canvasHeight = WebPDemuxGetI(demuxer, WEBP_FF_CANVAS_HEIGHT);
+    uint32_t frameCount = WebPDemuxGetI(demuxer, WEBP_FF_FRAME_COUNT);
+    int loopCount = WebPDemuxGetI(demuxer, WEBP_FF_LOOP_COUNT);
+    
+    NSUInteger limitBytes = 0;
+    NSNumber *limitBytesValue = options[SDImageCoderDecodeScaleDownLimitBytes];
+    if (limitBytesValue != nil) {
+        limitBytes = limitBytesValue.unsignedIntegerValue;
+    }
+    // Scale down to the limit bytes if needed
+    if (limitBytes > 0) {
+        // Assume 32 bits per pixel
+        CGSize imageSize = CGSizeMake(canvasWidth, canvasHeight);
+        CGSize framePixelSize = [SDImageCoderHelper scaledSizeWithImageSize:imageSize limitBytes:limitBytes bytesPerPixel:4 frameCount:frameCount];
+        // Override thumbnail size
+        thumbnailSize = framePixelSize;
+        preserveAspectRatio = YES;
+    }
+    
     // Check whether we need to use thumbnail
     CGSize scaledSize = [SDImageCoderHelper scaledSizeWithImageSize:CGSizeMake(canvasWidth, canvasHeight) scaleSize:thumbnailSize preserveAspectRatio:preserveAspectRatio shouldScaleUp:NO];
     
@@ -245,7 +347,6 @@ static inline CGContextRef _Nullable CreateWebPCanvas(BOOL hasAlpha, CGSize canv
         return nil;
     }
     
-    int loopCount = WebPDemuxGetI(demuxer, WEBP_FF_LOOP_COUNT);
     NSMutableArray<SDImageFrame *> *frames = [NSMutableArray array];
     
     do {
@@ -285,8 +386,8 @@ static inline CGContextRef _Nullable CreateWebPCanvas(BOOL hasAlpha, CGSize canv
 - (instancetype)initIncrementalWithOptions:(nullable SDImageCoderOptions *)options {
     self = [super init];
     if (self) {
-        // Progressive images need transparent, so always use premultiplied BGRA
-        _idec = WebPINewRGB(MODE_bgrA, NULL, 0, 0);
+        // Progressive images need transparent, so always use premultiplied RGBA
+        _idec = WebPINewRGB(MODE_rgbA, NULL, 0, 0);
         CGFloat scale = 1;
         NSNumber *scaleFactor = options[SDImageCoderDecodeScaleFactor];
         if (scaleFactor != nil) {
@@ -312,6 +413,12 @@ static inline CGContextRef _Nullable CreateWebPCanvas(BOOL hasAlpha, CGSize canv
             preserveAspectRatio = preserveAspectRatioValue.boolValue;
         }
         _preserveAspectRatio = preserveAspectRatio;
+        NSUInteger limitBytes = 0;
+        NSNumber *limitBytesValue = options[SDImageCoderDecodeScaleDownLimitBytes];
+        if (limitBytesValue != nil) {
+            limitBytes = limitBytesValue.unsignedIntegerValue;
+        }
+        _limitBytes = limitBytes;
         _currentBlendIndex = NSNotFound;
         SD_LOCK_INIT(_lock);
     }
@@ -352,6 +459,15 @@ static inline CGContextRef _Nullable CreateWebPCanvas(BOOL hasAlpha, CGSize canv
         [self scanAndCheckFramesValidWithDemuxer:_demux];
     }
     SD_UNLOCK(_lock);
+    // Scale down to the limit bytes if needed
+    if (_limitBytes > 0) {
+        // Assume 32 bits per pixel
+        CGSize imageSize = CGSizeMake(_canvasWidth, _canvasHeight);
+        CGSize framePixelSize = [SDImageCoderHelper scaledSizeWithImageSize:imageSize limitBytes:_limitBytes bytesPerPixel:4 frameCount:_frameCount];
+        // Override thumbnail size
+        _thumbnailSize = framePixelSize;
+        _preserveAspectRatio = YES;
+    }
 }
 
 - (UIImage *)incrementalDecodedImageWithOptions:(SDImageCoderOptions *)options {
@@ -381,9 +497,10 @@ static inline CGContextRef _Nullable CreateWebPCanvas(BOOL hasAlpha, CGSize canv
         CGDataProviderRef provider =
         CGDataProviderCreateWithData(NULL, rgba, rgbaSize, NULL);
         CGColorSpaceRef colorSpaceRef = [SDImageCoderHelper colorSpaceGetDeviceRGB];
-        
-        CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host | kCGImageAlphaPremultipliedFirst;
+        // Because _idec uses MODE_rgbA
+        CGBitmapInfo bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedLast;
         size_t components = 4;
+        BOOL shouldInterpolate = YES;
         CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
         // The reason to use last_y for the image height is a libwebp bug (https://bugs.chromium.org/p/webp/issues/detail?id=362)
         // It does not keep the memory barrier safe on x86 architecture (macOS & iPhone simulator), but on ARM architecture (iPhone & iPad & tv & watch) it works great
@@ -391,7 +508,7 @@ static inline CGContextRef _Nullable CreateWebPCanvas(BOOL hasAlpha, CGSize canv
         // So this would make our drawn image look strange (above is the current part but below is the previous part)
         // We only grab the last_y height and draw the last_y height instead of the total height image
         // Besides the fix, this enhances performance since we do not need to create an extra bitmap
-        CGImageRef imageRef = CGImageCreate(width, last_y, 8, components * 8, components * width, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);
+        CGImageRef imageRef = CGImageCreate(width, last_y, 8, components * 8, components * width, colorSpaceRef, bitmapInfo, provider, NULL, shouldInterpolate, renderingIntent);
         
         CGDataProviderRelease(provider);
         
@@ -499,20 +616,45 @@ static inline CGContextRef _Nullable CreateWebPCanvas(BOOL hasAlpha, CGSize canv
     }
     
     BOOL hasAlpha = config.input.has_alpha;
-    // iOS prefer BGRA8888 (premultiplied) or BGRX8888 bitmapInfo for screen rendering, which is same as `UIGraphicsBeginImageContext()` or `- [CALayer drawInContext:]`
-    // use this bitmapInfo, combined with right colorspace, even without decode, can still avoid extra CA::Render::copy_image(which marked `Color Copied Images` from Instruments)
-    CGBitmapInfo bitmapInfo = kCGBitmapByteOrder32Host;
-    bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNoneSkipFirst;
+    // From SDWebImage v5.17.0, use runtime detection of the bitmap info instead of hardcoding it.
+    SDImagePixelFormat pixelFormat = [SDImageCoderHelper preferredPixelFormat:hasAlpha];
+    CGBitmapInfo bitmapInfo = pixelFormat.bitmapInfo;
+    WEBP_CSP_MODE mode = ConvertCSPMode(bitmapInfo);
+    if (mode == MODE_LAST) {
+        NSAssert(NO, @"Unsupported libwebp preferred CGBitmapInfo: %d", bitmapInfo);
+        return nil;
+    }
+    config.output.colorspace = mode;
     config.options.use_threads = 1;
-    config.output.colorspace = MODE_bgrA;
+    
     
     // Use scaling for thumbnail
+    size_t width = config.input.width;
+    size_t height = config.input.height;
     if (scaledSize.width != 0 && scaledSize.height != 0) {
         config.options.use_scaling = 1;
         config.options.scaled_width = scaledSize.width;
         config.options.scaled_height = scaledSize.height;
+        width = scaledSize.width;
+        height = scaledSize.height;
     }
     
+    // We allocate the buffer and do byte alignment ourselves. By default libwebp does not byte-align to `bitsPerPixel`, which makes Core Animation unhappy and always triggers `CA::Render::copy_image`
+    size_t bitsPerComponent = 8;
+    size_t components = (mode == MODE_RGB || mode == MODE_BGR) ? 3 : 4; // Actually always 4
+    size_t bitsPerPixel = bitsPerComponent * components;
+    // Read: https://github.com/path/FastImageCache#byte-alignment
+    // A properly aligned bytes-per-row value must be a multiple of 8 pixels × bytes per pixel
+    // For a typical ARGB image, the aligned bytes-per-row value is a multiple of 64.
+    size_t alignment = pixelFormat.alignment;
+    size_t bytesPerRow = SDByteAlign(width * (bitsPerPixel / 8), alignment);
+    
+    void *rgba = WebPMalloc(bytesPerRow * height);
+    config.output.is_external_memory = 1;
+    config.output.u.RGBA.rgba = rgba;
+    config.output.u.RGBA.stride = (int)bytesPerRow;
+    config.output.u.RGBA.size = height * bytesPerRow;
+    
     // Decode the WebP image data into a RGBA value array
     if (WebPDecode(webpData.bytes, webpData.size, &config) != VP8_STATUS_OK) {
         return nil;
@@ -521,13 +663,9 @@ static inline CGContextRef _Nullable CreateWebPCanvas(BOOL hasAlpha, CGSize canv
     // Construct a UIImage from the decoded RGBA value array
     CGDataProviderRef provider =
     CGDataProviderCreateWithData(NULL, config.output.u.RGBA.rgba, config.output.u.RGBA.size, FreeImageData);
-    size_t bitsPerComponent = 8;
-    size_t bitsPerPixel = 32;
-    size_t bytesPerRow = config.output.u.RGBA.stride;
-    size_t width = config.output.width;
-    size_t height = config.output.height;
+    BOOL shouldInterpolate = YES;
     CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
-    CGImageRef imageRef = CGImageCreate(width, height, bitsPerComponent, bitsPerPixel, bytesPerRow, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);
+    CGImageRef imageRef = CGImageCreate(width, height, bitsPerComponent, bitsPerPixel, bytesPerRow, colorSpaceRef, bitmapInfo, provider, NULL, shouldInterpolate, renderingIntent);
     
     CGDataProviderRelease(provider);
     
@@ -709,9 +847,6 @@ static inline CGContextRef _Nullable CreateWebPCanvas(BOOL hasAlpha, CGSize canv
     }
     
     size_t bytesPerRow = CGImageGetBytesPerRow(imageRef);
-    size_t bitsPerComponent = CGImageGetBitsPerComponent(imageRef);
-    size_t bitsPerPixel = CGImageGetBitsPerPixel(imageRef);
-    size_t components = bitsPerPixel / bitsPerComponent;
     CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(imageRef);
     CGImageAlphaInfo alphaInfo = bitmapInfo & kCGBitmapAlphaInfoMask;
     CGBitmapInfo byteOrderInfo = bitmapInfo & kCGBitmapByteOrderMask;
@@ -844,7 +979,7 @@ static inline CGContextRef _Nullable CreateWebPCanvas(BOOL hasAlpha, CGSize canv
 }
 
 static void FreeImageData(void *info, const void *data, size_t size) {
-    free((void *)data);
+    WebPFree((void *)data);
 }
 
 static int GetIntValueForKey(NSDictionary * _Nonnull dictionary, NSString * _Nonnull key, int defaultValue) {
@@ -911,6 +1046,21 @@ static float GetFloatValueForKey(NSDictionary * _Nonnull dictionary, NSString *
             preserveAspectRatio = preserveAspectRatioValue.boolValue;
         }
         _preserveAspectRatio = preserveAspectRatio;
+        NSUInteger limitBytes = 0;
+        NSNumber *limitBytesValue = options[SDImageCoderDecodeScaleDownLimitBytes];
+        if (limitBytesValue != nil) {
+            limitBytes = limitBytesValue.unsignedIntegerValue;
+        }
+        _limitBytes = limitBytes;
+        // Scale down to the limit bytes if needed
+        if (_limitBytes > 0) {
+            // Assume 32 bits per pixel
+            CGSize imageSize = CGSizeMake(_canvasWidth, _canvasHeight);
+            CGSize framePixelSize = [SDImageCoderHelper scaledSizeWithImageSize:imageSize limitBytes:_limitBytes bytesPerPixel:4 frameCount:_frameCount];
+            // Override thumbnail size
+            _thumbnailSize = framePixelSize;
+            _preserveAspectRatio = YES;
+        }
         _scale = scale;
         _demux = demuxer;
         _imageData = data;
@@ -1174,6 +1324,7 @@ static float GetFloatValueForKey(NSDictionary * _Nonnull dictionary, NSString *
 #else
     image = [[UIImage alloc] initWithCGImage:imageRef scale:_scale orientation:kCGImagePropertyOrientationUp];
 #endif
+    image.sd_imageFormat = SDImageFormatWebP;
     CGImageRelease(imageRef);
     
     WebPDemuxReleaseIterator(&iter);
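
A small worked example of the row-stride math introduced in this file, assuming `SDByteAlign` rounds a byte count up to the next multiple of the given alignment; the width and alignment values are illustrative only:

```objective-c
size_t width = 300;
size_t bitsPerPixel = 32;                                    // 4-byte RGBA/BGRA pixels
size_t alignment = 32;                                       // e.g. a 32-byte row alignment
size_t bytesPerRow = SDByteAlign(width * (bitsPerPixel / 8), alignment);
// 300 * 4 = 1200 bytes, rounded up to the next multiple of 32 => 1216 bytes per row,
// so each row carries 16 padding bytes that keep Core Animation from re-copying the image.
```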

+ 1 - 1
Pods/Target Support Files/Pods-DcShare/Pods-DcShare.debug.xcconfig

@@ -4,7 +4,7 @@ GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1 $(inherited) SD_WEBP=1
 HEADER_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/SDWebImage/SDWebImage.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/SDWebImageWebPCoder/SDWebImageWebPCoder.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/libwebp/libwebp.framework/Headers"
 LD_RUNPATH_SEARCH_PATHS = $(inherited) '@executable_path/Frameworks' '@loader_path/Frameworks' '@executable_path/../../Frameworks'
 OTHER_CFLAGS = $(inherited) -isystem "${PODS_CONFIGURATION_BUILD_DIR}/SDWebImage/SDWebImage.framework/Headers" -isystem "${PODS_CONFIGURATION_BUILD_DIR}/SDWebImageWebPCoder/SDWebImageWebPCoder.framework/Headers" -isystem "${PODS_CONFIGURATION_BUILD_DIR}/libwebp/libwebp.framework/Headers" -iframework "${PODS_CONFIGURATION_BUILD_DIR}/SDWebImage" -iframework "${PODS_CONFIGURATION_BUILD_DIR}/SDWebImageWebPCoder" -iframework "${PODS_CONFIGURATION_BUILD_DIR}/libwebp"
-OTHER_LDFLAGS = $(inherited) -framework "ImageIO" -framework "SDWebImage" -framework "SDWebImageWebPCoder" -framework "libwebp"
+OTHER_LDFLAGS = $(inherited) -framework "CoreGraphics" -framework "ImageIO" -framework "SDWebImage" -framework "SDWebImageWebPCoder" -framework "libwebp"
 PODS_BUILD_DIR = ${BUILD_DIR}
 PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)
 PODS_PODFILE_DIR_PATH = ${SRCROOT}/.

+ 1 - 1
Pods/Target Support Files/Pods-DcShare/Pods-DcShare.release.xcconfig

@@ -4,7 +4,7 @@ GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1 $(inherited) SD_WEBP=1
 HEADER_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/SDWebImage/SDWebImage.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/SDWebImageWebPCoder/SDWebImageWebPCoder.framework/Headers" "${PODS_CONFIGURATION_BUILD_DIR}/libwebp/libwebp.framework/Headers"
 LD_RUNPATH_SEARCH_PATHS = $(inherited) '@executable_path/Frameworks' '@loader_path/Frameworks' '@executable_path/../../Frameworks'
 OTHER_CFLAGS = $(inherited) -isystem "${PODS_CONFIGURATION_BUILD_DIR}/SDWebImage/SDWebImage.framework/Headers" -isystem "${PODS_CONFIGURATION_BUILD_DIR}/SDWebImageWebPCoder/SDWebImageWebPCoder.framework/Headers" -isystem "${PODS_CONFIGURATION_BUILD_DIR}/libwebp/libwebp.framework/Headers" -iframework "${PODS_CONFIGURATION_BUILD_DIR}/SDWebImage" -iframework "${PODS_CONFIGURATION_BUILD_DIR}/SDWebImageWebPCoder" -iframework "${PODS_CONFIGURATION_BUILD_DIR}/libwebp"
-OTHER_LDFLAGS = $(inherited) -framework "ImageIO" -framework "SDWebImage" -framework "SDWebImageWebPCoder" -framework "libwebp"
+OTHER_LDFLAGS = $(inherited) -framework "CoreGraphics" -framework "ImageIO" -framework "SDWebImage" -framework "SDWebImageWebPCoder" -framework "libwebp"
 PODS_BUILD_DIR = ${BUILD_DIR}
 PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)
 PODS_PODFILE_DIR_PATH = ${SRCROOT}/.

+ 1 - 1
Pods/Target Support Files/SDWebImage/SDWebImage-Info.plist

@@ -15,7 +15,7 @@
   <key>CFBundlePackageType</key>
   <string>FMWK</string>
   <key>CFBundleShortVersionString</key>
-  <string>5.15.6</string>
+  <string>5.18.2</string>
   <key>CFBundleSignature</key>
   <string>????</string>
   <key>CFBundleVersion</key>

+ 1 - 0
Pods/Target Support Files/SDWebImage/SDWebImage-umbrella.h

@@ -69,6 +69,7 @@
 #import "UIImageView+WebCache.h"
 #import "UIView+WebCache.h"
 #import "UIView+WebCacheOperation.h"
+#import "UIView+WebCacheState.h"
 #import "SDWebImage.h"
 
 FOUNDATION_EXPORT double SDWebImageVersionNumber;

+ 1 - 1
Pods/Target Support Files/SDWebImageWebPCoder/SDWebImageWebPCoder-Info.plist

@@ -15,7 +15,7 @@
   <key>CFBundlePackageType</key>
   <string>FMWK</string>
   <key>CFBundleShortVersionString</key>
-  <string>0.11.0</string>
+  <string>0.13.0</string>
   <key>CFBundleSignature</key>
   <string>????</string>
   <key>CFBundleVersion</key>

+ 1 - 1
Pods/Target Support Files/SDWebImageWebPCoder/SDWebImageWebPCoder.debug.xcconfig

@@ -3,7 +3,7 @@ CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = NO
 CONFIGURATION_BUILD_DIR = ${PODS_CONFIGURATION_BUILD_DIR}/SDWebImageWebPCoder
 FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/SDWebImage" "${PODS_CONFIGURATION_BUILD_DIR}/libwebp"
 GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1 $(inherited) SD_WEBP=1
-OTHER_LDFLAGS = $(inherited) -framework "ImageIO" -framework "SDWebImage" -framework "libwebp"
+OTHER_LDFLAGS = $(inherited) -framework "CoreGraphics" -framework "ImageIO" -framework "SDWebImage" -framework "libwebp"
 PODS_BUILD_DIR = ${BUILD_DIR}
 PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)
 PODS_DEVELOPMENT_LANGUAGE = ${DEVELOPMENT_LANGUAGE}

+ 1 - 1
Pods/Target Support Files/SDWebImageWebPCoder/SDWebImageWebPCoder.release.xcconfig

@@ -3,7 +3,7 @@ CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = NO
 CONFIGURATION_BUILD_DIR = ${PODS_CONFIGURATION_BUILD_DIR}/SDWebImageWebPCoder
 FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_CONFIGURATION_BUILD_DIR}/SDWebImage" "${PODS_CONFIGURATION_BUILD_DIR}/libwebp"
 GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1 $(inherited) SD_WEBP=1
-OTHER_LDFLAGS = $(inherited) -framework "ImageIO" -framework "SDWebImage" -framework "libwebp"
+OTHER_LDFLAGS = $(inherited) -framework "CoreGraphics" -framework "ImageIO" -framework "SDWebImage" -framework "libwebp"
 PODS_BUILD_DIR = ${BUILD_DIR}
 PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)
 PODS_DEVELOPMENT_LANGUAGE = ${DEVELOPMENT_LANGUAGE}

+ 1 - 1
Pods/Target Support Files/libwebp/libwebp-Info.plist

@@ -15,7 +15,7 @@
   <key>CFBundlePackageType</key>
   <string>FMWK</string>
   <key>CFBundleShortVersionString</key>
-  <string>1.2.4</string>
+  <string>1.3.2</string>
   <key>CFBundleSignature</key>
   <string>????</string>
   <key>CFBundleVersion</key>

+ 1 - 0
Pods/Target Support Files/libwebp/libwebp-umbrella.h

@@ -12,6 +12,7 @@
 
 #import "demux.h"
 #import "mux.h"
+#import "sharpyuv.h"
 #import "decode.h"
 #import "encode.h"
 #import "types.h"

+ 1 - 1
Pods/libwebp/README.md

@@ -7,7 +7,7 @@
       \__\__/\____/\_____/__/ ____  ___
             / _/ /    \    \ /  _ \/ _/
            /  \_/   / /   \ \   __/  \__
-           \____/____/\_____/_____/____/v1.2.4
+           \____/____/\_____/_____/____/v1.3.2
 ```
 
 WebP codec is a library to encode and decode images in WebP format. This package

+ 12 - 5
Pods/libwebp/sharpyuv/Makefile.am

@@ -1,13 +1,19 @@
 AM_CPPFLAGS += -I$(top_builddir) -I$(top_srcdir)
 AM_CPPFLAGS += -I$(top_builddir)/src -I$(top_srcdir)/src
+
+lib_LTLIBRARIES = libsharpyuv.la
+
 noinst_LTLIBRARIES =
-noinst_LTLIBRARIES += libsharpyuv.la
 noinst_LTLIBRARIES += libsharpyuv_sse2.la
 noinst_LTLIBRARIES += libsharpyuv_neon.la
 
+libsharpyuvinclude_HEADERS =
+libsharpyuvinclude_HEADERS += sharpyuv.h
+libsharpyuvinclude_HEADERS += sharpyuv_csp.h
 noinst_HEADERS =
-noinst_HEADERS += ../src/webp/types.h
+noinst_HEADERS += ../src/dsp/cpu.c
 noinst_HEADERS += ../src/dsp/cpu.h
+noinst_HEADERS += ../src/webp/types.h
 
 libsharpyuv_sse2_la_SOURCES =
 libsharpyuv_sse2_la_SOURCES += sharpyuv_sse2.c
@@ -20,15 +26,16 @@ libsharpyuv_neon_la_CPPFLAGS = $(libsharpyuv_la_CPPFLAGS)
 libsharpyuv_neon_la_CFLAGS = $(AM_CFLAGS) $(NEON_FLAGS)
 
 libsharpyuv_la_SOURCES =
+libsharpyuv_la_SOURCES += sharpyuv_cpu.c sharpyuv_cpu.h
 libsharpyuv_la_SOURCES += sharpyuv_csp.c sharpyuv_csp.h
 libsharpyuv_la_SOURCES += sharpyuv_dsp.c sharpyuv_dsp.h
 libsharpyuv_la_SOURCES += sharpyuv_gamma.c sharpyuv_gamma.h
 libsharpyuv_la_SOURCES += sharpyuv.c sharpyuv.h
 
 libsharpyuv_la_CPPFLAGS = $(AM_CPPFLAGS)
-libsharpyuv_la_LDFLAGS =
+libsharpyuv_la_LDFLAGS = -no-undefined -version-info 0:1:0 -lm
 libsharpyuv_la_LIBADD =
 libsharpyuv_la_LIBADD += libsharpyuv_sse2.la
 libsharpyuv_la_LIBADD += libsharpyuv_neon.la
-
-noinst_PROGRAMS =
+libsharpyuvincludedir = $(includedir)/webp/sharpyuv
+pkgconfig_DATA = libsharpyuv.pc

+ 11 - 0
Pods/libwebp/sharpyuv/libsharpyuv.pc.in

@@ -0,0 +1,11 @@
+prefix=@prefix@
+exec_prefix=@exec_prefix@
+libdir=@libdir@
+includedir=@includedir@/webp
+
+Name: libsharpyuv
+Description: Library for sharp RGB to YUV conversion
+Version: @PACKAGE_VERSION@
+Cflags: -I${includedir}
+Libs: -L${libdir} -l@webp_libname_prefix@sharpyuv
+Libs.private: -lm @PTHREAD_CFLAGS@ @PTHREAD_LIBS@

+ 41 - 0
Pods/libwebp/sharpyuv/libsharpyuv.rc

@@ -0,0 +1,41 @@
+#define APSTUDIO_READONLY_SYMBOLS
+#include "winres.h"
+#undef APSTUDIO_READONLY_SYMBOLS
+
+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENU)
+LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US
+
+VS_VERSION_INFO VERSIONINFO
+ FILEVERSION 0,0,2,1
+ PRODUCTVERSION 0,0,2,1
+ FILEFLAGSMASK 0x3fL
+#ifdef _DEBUG
+ FILEFLAGS 0x1L
+#else
+ FILEFLAGS 0x0L
+#endif
+ FILEOS 0x40004L
+ FILETYPE 0x2L
+ FILESUBTYPE 0x0L
+BEGIN
+    BLOCK "StringFileInfo"
+    BEGIN
+        BLOCK "040904b0"
+        BEGIN
+            VALUE "CompanyName", "Google, Inc."
+            VALUE "FileDescription", "libsharpyuv DLL"
+            VALUE "FileVersion", "0.2.1"
+            VALUE "InternalName", "libsharpyuv.dll"
+            VALUE "LegalCopyright", "Copyright (C) 2023"
+            VALUE "OriginalFilename", "libsharpyuv.dll"
+            VALUE "ProductName", "SharpYuv Library"
+            VALUE "ProductVersion", "0.2.1"
+        END
+    END
+    BLOCK "VarFileInfo"
+    BEGIN
+        VALUE "Translation", 0x409, 1200
+    END
+END
+
+#endif    // English (United States) resources

+ 44 - 15
Pods/libwebp/sharpyuv/sharpyuv.c

@@ -15,15 +15,21 @@
 
 #include <assert.h>
 #include <limits.h>
-#include <math.h>
+#include <stddef.h>
 #include <stdlib.h>
 #include <string.h>
 
 #include "src/webp/types.h"
-#include "src/dsp/cpu.h"
+#include "sharpyuv/sharpyuv_cpu.h"
 #include "sharpyuv/sharpyuv_dsp.h"
 #include "sharpyuv/sharpyuv_gamma.h"
 
+//------------------------------------------------------------------------------
+
+int SharpYuvGetVersion(void) {
+  return SHARPYUV_VERSION;
+}
+
 //------------------------------------------------------------------------------
 // Sharp RGB->YUV conversion
 
@@ -414,24 +420,46 @@ static int DoSharpArgbToYuv(const uint8_t* r_ptr, const uint8_t* g_ptr,
 }
 #undef SAFE_ALLOC
 
+#if defined(WEBP_USE_THREAD) && !defined(_WIN32)
+#include <pthread.h>  // NOLINT
+
+#define LOCK_ACCESS \
+    static pthread_mutex_t sharpyuv_lock = PTHREAD_MUTEX_INITIALIZER; \
+    if (pthread_mutex_lock(&sharpyuv_lock)) return
+#define UNLOCK_ACCESS_AND_RETURN                  \
+    do {                                          \
+      (void)pthread_mutex_unlock(&sharpyuv_lock); \
+      return;                                     \
+    } while (0)
+#else  // !(defined(WEBP_USE_THREAD) && !defined(_WIN32))
+#define LOCK_ACCESS do {} while (0)
+#define UNLOCK_ACCESS_AND_RETURN return
+#endif  // defined(WEBP_USE_THREAD) && !defined(_WIN32)
+
 // Hidden exported init function.
-// By default SharpYuvConvert calls it with NULL. If needed, users can declare
-// it as extern and call it with a VP8CPUInfo function.
-extern void SharpYuvInit(VP8CPUInfo cpu_info_func);
+// By default SharpYuvConvert calls it with SharpYuvGetCPUInfo. If needed,
+// users can declare it as extern and call it with an alternate VP8CPUInfo
+// function.
+extern VP8CPUInfo SharpYuvGetCPUInfo;
+SHARPYUV_EXTERN void SharpYuvInit(VP8CPUInfo cpu_info_func);
 void SharpYuvInit(VP8CPUInfo cpu_info_func) {
   static volatile VP8CPUInfo sharpyuv_last_cpuinfo_used =
       (VP8CPUInfo)&sharpyuv_last_cpuinfo_used;
-  const int initialized =
-      (sharpyuv_last_cpuinfo_used != (VP8CPUInfo)&sharpyuv_last_cpuinfo_used);
-  if (cpu_info_func == NULL && initialized) return;
-  if (sharpyuv_last_cpuinfo_used == cpu_info_func) return;
-
-  SharpYuvInitDsp(cpu_info_func);
-  if (!initialized) {
-    SharpYuvInitGammaTables();
+  LOCK_ACCESS;
+  // Only update SharpYuvGetCPUInfo when called from external code to avoid a
+  // race on reading the value in SharpYuvConvert().
+  if (cpu_info_func != (VP8CPUInfo)&SharpYuvGetCPUInfo) {
+    SharpYuvGetCPUInfo = cpu_info_func;
+  }
+  if (sharpyuv_last_cpuinfo_used == SharpYuvGetCPUInfo) {
+    UNLOCK_ACCESS_AND_RETURN;
   }
 
-  sharpyuv_last_cpuinfo_used = cpu_info_func;
+  SharpYuvInitDsp();
+  SharpYuvInitGammaTables();
+
+  sharpyuv_last_cpuinfo_used = SharpYuvGetCPUInfo;
+  UNLOCK_ACCESS_AND_RETURN;
 }
 
 int SharpYuvConvert(const void* r_ptr, const void* g_ptr,
@@ -467,7 +495,8 @@ int SharpYuvConvert(const void* r_ptr, const void* g_ptr,
     // Stride should be even for uint16_t buffers.
     return 0;
   }
-  SharpYuvInit(NULL);
+  // The address of the function pointer is used to avoid a read race.
+  SharpYuvInit((VP8CPUInfo)&SharpYuvGetCPUInfo);
 
   // Add scaling factor to go from rgb_bit_depth to yuv_bit_depth, to the
   // rgb->yuv conversion matrix.
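
A sketch of the override path the comments above describe, where a caller declares the hidden init and supplies its own CPU probe; `MyCPUInfo` is a hypothetical probe that reports no SIMD support, and `VP8CPUInfo`/`CPUFeature` come from `src/dsp/cpu.h`:

```objective-c
#include "src/dsp/cpu.h"   // VP8CPUInfo / CPUFeature

extern void SharpYuvInit(VP8CPUInfo cpu_info_func);

static int MyCPUInfo(CPUFeature feature) {
    (void)feature;
    return 0;              // report no SIMD support, forcing the plain-C code paths
}

void ConfigureSharpYuvForTesting(void) {
    SharpYuvInit(MyCPUInfo);
}
```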

+ 31 - 9
Pods/libwebp/sharpyuv/sharpyuv.h

@@ -12,16 +12,32 @@
 #ifndef WEBP_SHARPYUV_SHARPYUV_H_
 #define WEBP_SHARPYUV_SHARPYUV_H_
 
-#include <inttypes.h>
-
 #ifdef __cplusplus
 extern "C" {
 #endif
 
+#ifndef SHARPYUV_EXTERN
+#ifdef WEBP_EXTERN
+#define SHARPYUV_EXTERN WEBP_EXTERN
+#else
+// This explicitly marks library functions and allows for changing the
+// signature for e.g., Windows DLL builds.
+#if defined(__GNUC__) && __GNUC__ >= 4
+#define SHARPYUV_EXTERN extern __attribute__((visibility("default")))
+#else
+#if defined(_MSC_VER) && defined(WEBP_DLL)
+#define SHARPYUV_EXTERN __declspec(dllexport)
+#else
+#define SHARPYUV_EXTERN extern
+#endif /* _MSC_VER && WEBP_DLL */
+#endif /* __GNUC__ >= 4 */
+#endif /* WEBP_EXTERN */
+#endif /* SHARPYUV_EXTERN */
+
 // SharpYUV API version following the convention from semver.org
 #define SHARPYUV_VERSION_MAJOR 0
-#define SHARPYUV_VERSION_MINOR 1
-#define SHARPYUV_VERSION_PATCH 0
+#define SHARPYUV_VERSION_MINOR 2
+#define SHARPYUV_VERSION_PATCH 1
 // Version as a uint32_t. The major number is the high 8 bits.
 // The minor number is the middle 8 bits. The patch number is the low 16 bits.
 #define SHARPYUV_MAKE_VERSION(MAJOR, MINOR, PATCH) \
@@ -30,6 +46,10 @@ extern "C" {
   SHARPYUV_MAKE_VERSION(SHARPYUV_VERSION_MAJOR, SHARPYUV_VERSION_MINOR, \
                         SHARPYUV_VERSION_PATCH)
 
+// Returns the library's version number, packed in hexadecimal. See
+// SHARPYUV_VERSION.
+SHARPYUV_EXTERN int SharpYuvGetVersion(void);
+
 // RGB to YUV conversion matrix, in 16 bit fixed point.
 // y = rgb_to_y[0] * r + rgb_to_y[1] * g + rgb_to_y[2] * b + rgb_to_y[3]
 // u = rgb_to_u[0] * r + rgb_to_u[1] * g + rgb_to_u[2] * b + rgb_to_u[3]
@@ -65,11 +85,13 @@ typedef struct {
 //     adjacent pixels on the y, u and v channels. If yuv_bit_depth > 8, they
 //     should be multiples of 2.
 // width, height: width and height of the image in pixels
-int SharpYuvConvert(const void* r_ptr, const void* g_ptr, const void* b_ptr,
-                    int rgb_step, int rgb_stride, int rgb_bit_depth,
-                    void* y_ptr, int y_stride, void* u_ptr, int u_stride,
-                    void* v_ptr, int v_stride, int yuv_bit_depth, int width,
-                    int height, const SharpYuvConversionMatrix* yuv_matrix);
+SHARPYUV_EXTERN int SharpYuvConvert(const void* r_ptr, const void* g_ptr,
+                                    const void* b_ptr, int rgb_step,
+                                    int rgb_stride, int rgb_bit_depth,
+                                    void* y_ptr, int y_stride, void* u_ptr,
+                                    int u_stride, void* v_ptr, int v_stride,
+                                    int yuv_bit_depth, int width, int height,
+                                    const SharpYuvConversionMatrix* yuv_matrix);
 
 // TODO(b/194336375): Add YUV444 to YUV420 conversion. Maybe also add 422
 // support (it's rarely used in practice, especially for images).
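
A quick check of the version-packing rule above, using the newly exported `SharpYuvGetVersion()`; the constants are just this header's 0.2.1 values:

```objective-c
#include "sharpyuv/sharpyuv.h"

int SharpYuvAtLeast020(void) {
    // For 0.2.1: (0 << 24) | (2 << 16) | 1 == 0x00020001 == 131073
    return SharpYuvGetVersion() >= SHARPYUV_MAKE_VERSION(0, 2, 0);
}
```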

+ 14 - 0
Pods/libwebp/sharpyuv/sharpyuv_cpu.c

@@ -0,0 +1,14 @@
+// Copyright 2022 Google Inc. All Rights Reserved.
+//
+// Use of this source code is governed by a BSD-style license
+// that can be found in the COPYING file in the root of the source
+// tree. An additional intellectual property rights grant can be found
+// in the file PATENTS. All contributing project authors may
+// be found in the AUTHORS file in the root of the source tree.
+// -----------------------------------------------------------------------------
+//
+#include "sharpyuv/sharpyuv_cpu.h"
+
+// Include src/dsp/cpu.c to create SharpYuvGetCPUInfo from VP8GetCPUInfo. The
+// function pointer is renamed in sharpyuv_cpu.h.
+#include "src/dsp/cpu.c"

+ 22 - 0
Pods/libwebp/sharpyuv/sharpyuv_cpu.h

@@ -0,0 +1,22 @@
+// Copyright 2022 Google Inc. All Rights Reserved.
+//
+// Use of this source code is governed by a BSD-style license
+// that can be found in the COPYING file in the root of the source
+// tree. An additional intellectual property rights grant can be found
+// in the file PATENTS. All contributing project authors may
+// be found in the AUTHORS file in the root of the source tree.
+// -----------------------------------------------------------------------------
+//
+#ifndef WEBP_SHARPYUV_SHARPYUV_CPU_H_
+#define WEBP_SHARPYUV_SHARPYUV_CPU_H_
+
+#include "sharpyuv/sharpyuv.h"
+
+// Avoid exporting SharpYuvGetCPUInfo in shared object / DLL builds.
+// SharpYuvInit() replaces the use of the function pointer.
+#undef WEBP_EXTERN
+#define WEBP_EXTERN extern
+#define VP8GetCPUInfo SharpYuvGetCPUInfo
+#include "src/dsp/cpu.h"
+
+#endif  // WEBP_SHARPYUV_SHARPYUV_CPU_H_

+ 1 - 1
Pods/libwebp/sharpyuv/sharpyuv_csp.c

@@ -13,7 +13,7 @@
 
 #include <assert.h>
 #include <math.h>
-#include <string.h>
+#include <stddef.h>
 
 static int ToFixed16(float f) { return (int)floor(f * (1 << 16) + 0.5f); }
 

+ 4 - 3
Pods/libwebp/sharpyuv/sharpyuv_csp.h

@@ -35,8 +35,9 @@ typedef struct {
 } SharpYuvColorSpace;
 
 // Fills in 'matrix' for the given YUVColorSpace.
-void SharpYuvComputeConversionMatrix(const SharpYuvColorSpace* yuv_color_space,
-                                     SharpYuvConversionMatrix* matrix);
+SHARPYUV_EXTERN void SharpYuvComputeConversionMatrix(
+    const SharpYuvColorSpace* yuv_color_space,
+    SharpYuvConversionMatrix* matrix);
 
 // Enums for precomputed conversion matrices.
 typedef enum {
@@ -49,7 +50,7 @@ typedef enum {
 } SharpYuvMatrixType;
 
 // Returns a pointer to a matrix for one of the predefined colorspaces.
-const SharpYuvConversionMatrix* SharpYuvGetConversionMatrix(
+SHARPYUV_EXTERN const SharpYuvConversionMatrix* SharpYuvGetConversionMatrix(
     SharpYuvMatrixType matrix_type);
 
 #ifdef __cplusplus

+ 10 - 8
Pods/libwebp/sharpyuv/sharpyuv_dsp.c

@@ -16,7 +16,7 @@
 #include <assert.h>
 #include <stdlib.h>
 
-#include "src/dsp/cpu.h"
+#include "sharpyuv/sharpyuv_cpu.h"
 
 //-----------------------------------------------------------------------------
 
@@ -72,26 +72,28 @@ void (*SharpYuvFilterRow)(const int16_t* A, const int16_t* B, int len,
                           const uint16_t* best_y, uint16_t* out,
                           int bit_depth);
 
+extern VP8CPUInfo SharpYuvGetCPUInfo;
 extern void InitSharpYuvSSE2(void);
 extern void InitSharpYuvNEON(void);
 
-void SharpYuvInitDsp(VP8CPUInfo cpu_info_func) {
-  (void)cpu_info_func;
-
+void SharpYuvInitDsp(void) {
 #if !WEBP_NEON_OMIT_C_CODE
   SharpYuvUpdateY = SharpYuvUpdateY_C;
   SharpYuvUpdateRGB = SharpYuvUpdateRGB_C;
   SharpYuvFilterRow = SharpYuvFilterRow_C;
 #endif
 
+  if (SharpYuvGetCPUInfo != NULL) {
 #if defined(WEBP_HAVE_SSE2)
-  if (cpu_info_func == NULL || cpu_info_func(kSSE2)) {
-    InitSharpYuvSSE2();
-  }
+    if (SharpYuvGetCPUInfo(kSSE2)) {
+      InitSharpYuvSSE2();
+    }
 #endif  // WEBP_HAVE_SSE2
+  }
 
 #if defined(WEBP_HAVE_NEON)
-  if (WEBP_NEON_OMIT_C_CODE || cpu_info_func == NULL || cpu_info_func(kNEON)) {
+  if (WEBP_NEON_OMIT_C_CODE ||
+      (SharpYuvGetCPUInfo != NULL && SharpYuvGetCPUInfo(kNEON))) {
     InitSharpYuvNEON();
   }
 #endif  // WEBP_HAVE_NEON

+ 3 - 4
Pods/libwebp/sharpyuv/sharpyuv_dsp.h

@@ -12,9 +12,8 @@
 #ifndef WEBP_SHARPYUV_SHARPYUV_DSP_H_
 #define WEBP_SHARPYUV_SHARPYUV_DSP_H_
 
-#include <stdint.h>
-
-#include "src/dsp/cpu.h"
+#include "sharpyuv/sharpyuv_cpu.h"
+#include "src/webp/types.h"
 
 extern uint64_t (*SharpYuvUpdateY)(const uint16_t* src, const uint16_t* ref,
                                    uint16_t* dst, int len, int bit_depth);
@@ -24,6 +23,6 @@ extern void (*SharpYuvFilterRow)(const int16_t* A, const int16_t* B, int len,
                                  const uint16_t* best_y, uint16_t* out,
                                  int bit_depth);
 
-void SharpYuvInitDsp(VP8CPUInfo cpu_info_func);
+void SharpYuvInitDsp(void);
 
 #endif  // WEBP_SHARPYUV_SHARPYUV_DSP_H_

+ 0 - 1
Pods/libwebp/sharpyuv/sharpyuv_gamma.c

@@ -13,7 +13,6 @@
 
 #include <assert.h>
 #include <math.h>
-#include <stdint.h>
 
 #include "src/webp/types.h"
 

+ 1 - 1
Pods/libwebp/sharpyuv/sharpyuv_gamma.h

@@ -12,7 +12,7 @@
 #ifndef WEBP_SHARPYUV_SHARPYUV_GAMMA_H_
 #define WEBP_SHARPYUV_SHARPYUV_GAMMA_H_
 
-#include <stdint.h>
+#include "src/webp/types.h"
 
 #ifdef __cplusplus
 extern "C" {

+ 4 - 5
Pods/libwebp/sharpyuv/sharpyuv_neon.c

@@ -17,11 +17,6 @@
 #include <assert.h>
 #include <stdlib.h>
 #include <arm_neon.h>
-#endif
-
-extern void InitSharpYuvNEON(void);
-
-#if defined(WEBP_USE_NEON)
 
 static uint16_t clip_NEON(int v, int max) {
   return (v < 0) ? 0 : (v > max) ? max : (uint16_t)v;
@@ -169,6 +164,8 @@ static void SharpYuvFilterRow_NEON(const int16_t* A, const int16_t* B, int len,
 
 //------------------------------------------------------------------------------
 
+extern void InitSharpYuvNEON(void);
+
 WEBP_TSAN_IGNORE_FUNCTION void InitSharpYuvNEON(void) {
   SharpYuvUpdateY = SharpYuvUpdateY_NEON;
   SharpYuvUpdateRGB = SharpYuvUpdateRGB_NEON;
@@ -177,6 +174,8 @@ WEBP_TSAN_IGNORE_FUNCTION void InitSharpYuvNEON(void) {
 
 #else  // !WEBP_USE_NEON
 
+extern void InitSharpYuvNEON(void);
+
 void InitSharpYuvNEON(void) {}
 
 #endif  // WEBP_USE_NEON

+ 2 - 5
Pods/libwebp/sharpyuv/sharpyuv_sse2.c

@@ -16,11 +16,6 @@
 #if defined(WEBP_USE_SSE2)
 #include <stdlib.h>
 #include <emmintrin.h>
-#endif
-
-extern void InitSharpYuvSSE2(void);
-
-#if defined(WEBP_USE_SSE2)
 
 static uint16_t clip_SSE2(int v, int max) {
   return (v < 0) ? 0 : (v > max) ? max : (uint16_t)v;
@@ -199,6 +194,8 @@ WEBP_TSAN_IGNORE_FUNCTION void InitSharpYuvSSE2(void) {
 }
 #else  // !WEBP_USE_SSE2
 
+extern void InitSharpYuvSSE2(void);
+
 void InitSharpYuvSSE2(void) {}
 
 #endif  // WEBP_USE_SSE2

+ 2 - 2
Pods/libwebp/src/Makefile.am

@@ -36,7 +36,7 @@ libwebp_la_LIBADD += utils/libwebputils.la
 # other than the ones listed on the command line, i.e., after linking, it will
 # not have unresolved symbols. Some platforms (Windows among them) require all
 # symbols in shared libraries to be resolved at library creation.
-libwebp_la_LDFLAGS = -no-undefined -version-info 8:5:1
+libwebp_la_LDFLAGS = -no-undefined -version-info 8:8:1
 libwebpincludedir = $(includedir)/webp
 pkgconfig_DATA = libwebp.pc
 
@@ -48,7 +48,7 @@ if BUILD_LIBWEBPDECODER
   libwebpdecoder_la_LIBADD += dsp/libwebpdspdecode.la
   libwebpdecoder_la_LIBADD += utils/libwebputilsdecode.la
 
-  libwebpdecoder_la_LDFLAGS = -no-undefined -version-info 4:5:1
+  libwebpdecoder_la_LDFLAGS = -no-undefined -version-info 4:8:1
   pkgconfig_DATA += libwebpdecoder.pc
 endif
 

+ 2 - 1
Pods/libwebp/src/dec/tree_dec.c

@@ -12,10 +12,11 @@
 // Author: Skal (pascal.massimino@gmail.com)
 
 #include "src/dec/vp8i_dec.h"
+#include "src/dsp/cpu.h"
 #include "src/utils/bit_reader_inl_utils.h"
 
 #if !defined(USE_GENERIC_TREE)
-#if !defined(__arm__) && !defined(_M_ARM) && !defined(__aarch64__)
+#if !defined(__arm__) && !defined(_M_ARM) && !WEBP_AARCH64
 // using a table is ~1-2% slower on ARM. Prefer the coded-tree approach then.
 #define USE_GENERIC_TREE 1   // ALTERNATE_CODE
 #else

+ 2 - 0
Pods/libwebp/src/dec/vp8_dec.c

@@ -494,6 +494,8 @@ static int GetCoeffsAlt(VP8BitReader* const br,
   return 16;
 }
 
+extern VP8CPUInfo VP8GetCPUInfo;
+
 WEBP_DSP_INIT_FUNC(InitGetCoeffs) {
   if (VP8GetCPUInfo != NULL && VP8GetCPUInfo(kSlowSSSE3)) {
     GetCoeffs = GetCoeffsAlt;

+ 2 - 2
Pods/libwebp/src/dec/vp8i_dec.h

@@ -31,8 +31,8 @@ extern "C" {
 
 // version numbers
 #define DEC_MAJ_VERSION 1
-#define DEC_MIN_VERSION 2
-#define DEC_REV_VERSION 4
+#define DEC_MIN_VERSION 3
+#define DEC_REV_VERSION 2
 
 // YUV-cache parameters. Cache is 32-bytes wide (= one cacheline).
 // Constraints are: We need to store one 16x16 block of luma samples (y),

+ 26 - 22
Pods/libwebp/src/dec/vp8l_dec.c

@@ -253,11 +253,11 @@ static int ReadHuffmanCodeLengths(
   int symbol;
   int max_symbol;
   int prev_code_len = DEFAULT_CODE_LENGTH;
-  HuffmanCode table[1 << LENGTHS_TABLE_BITS];
+  HuffmanTables tables;
 
-  if (!VP8LBuildHuffmanTable(table, LENGTHS_TABLE_BITS,
-                             code_length_code_lengths,
-                             NUM_CODE_LENGTH_CODES)) {
+  if (!VP8LHuffmanTablesAllocate(1 << LENGTHS_TABLE_BITS, &tables) ||
+      !VP8LBuildHuffmanTable(&tables, LENGTHS_TABLE_BITS,
+                             code_length_code_lengths, NUM_CODE_LENGTH_CODES)) {
     goto End;
   }
 
@@ -277,7 +277,7 @@ static int ReadHuffmanCodeLengths(
     int code_len;
     if (max_symbol-- == 0) break;
     VP8LFillBitWindow(br);
-    p = &table[VP8LPrefetchBits(br) & LENGTHS_TABLE_MASK];
+    p = &tables.curr_segment->start[VP8LPrefetchBits(br) & LENGTHS_TABLE_MASK];
     VP8LSetBitPos(br, br->bit_pos_ + p->bits);
     code_len = p->value;
     if (code_len < kCodeLengthLiterals) {
@@ -300,6 +300,7 @@ static int ReadHuffmanCodeLengths(
   ok = 1;
 
  End:
+  VP8LHuffmanTablesDeallocate(&tables);
   if (!ok) dec->status_ = VP8_STATUS_BITSTREAM_ERROR;
   return ok;
 }
@@ -307,7 +308,8 @@ static int ReadHuffmanCodeLengths(
 // 'code_lengths' is pre-allocated temporary buffer, used for creating Huffman
 // tree.
 static int ReadHuffmanCode(int alphabet_size, VP8LDecoder* const dec,
-                           int* const code_lengths, HuffmanCode* const table) {
+                           int* const code_lengths,
+                           HuffmanTables* const table) {
   int ok = 0;
   int size = 0;
   VP8LBitReader* const br = &dec->br_;
@@ -362,8 +364,7 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize,
   VP8LMetadata* const hdr = &dec->hdr_;
   uint32_t* huffman_image = NULL;
   HTreeGroup* htree_groups = NULL;
-  HuffmanCode* huffman_tables = NULL;
-  HuffmanCode* huffman_table = NULL;
+  HuffmanTables* huffman_tables = &hdr->huffman_tables_;
   int num_htree_groups = 1;
   int num_htree_groups_max = 1;
   int max_alphabet_size = 0;
@@ -372,6 +373,10 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize,
   int* mapping = NULL;
   int ok = 0;
 
+  // Check the table has been 0 initialized (through InitMetadata).
+  assert(huffman_tables->root.start == NULL);
+  assert(huffman_tables->curr_segment == NULL);
+
   if (allow_recursion && VP8LReadBits(br, 1)) {
     // use meta Huffman codes.
     const int huffman_precision = VP8LReadBits(br, 3) + 2;
@@ -434,16 +439,15 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize,
 
   code_lengths = (int*)WebPSafeCalloc((uint64_t)max_alphabet_size,
                                       sizeof(*code_lengths));
-  huffman_tables = (HuffmanCode*)WebPSafeMalloc(num_htree_groups * table_size,
-                                                sizeof(*huffman_tables));
   htree_groups = VP8LHtreeGroupsNew(num_htree_groups);
 
-  if (htree_groups == NULL || code_lengths == NULL || huffman_tables == NULL) {
+  if (htree_groups == NULL || code_lengths == NULL ||
+      !VP8LHuffmanTablesAllocate(num_htree_groups * table_size,
+                                 huffman_tables)) {
     dec->status_ = VP8_STATUS_OUT_OF_MEMORY;
     goto Error;
   }
 
-  huffman_table = huffman_tables;
   for (i = 0; i < num_htree_groups_max; ++i) {
     // If the index "i" is unused in the Huffman image, just make sure the
     // coefficients are valid but do not store them.
@@ -468,19 +472,20 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize,
       int max_bits = 0;
       for (j = 0; j < HUFFMAN_CODES_PER_META_CODE; ++j) {
         int alphabet_size = kAlphabetSize[j];
-        htrees[j] = huffman_table;
         if (j == 0 && color_cache_bits > 0) {
           alphabet_size += (1 << color_cache_bits);
         }
-        size = ReadHuffmanCode(alphabet_size, dec, code_lengths, huffman_table);
+        size =
+            ReadHuffmanCode(alphabet_size, dec, code_lengths, huffman_tables);
+        htrees[j] = huffman_tables->curr_segment->curr_table;
         if (size == 0) {
           goto Error;
         }
         if (is_trivial_literal && kLiteralMap[j] == 1) {
-          is_trivial_literal = (huffman_table->bits == 0);
+          is_trivial_literal = (htrees[j]->bits == 0);
         }
-        total_size += huffman_table->bits;
-        huffman_table += size;
+        total_size += htrees[j]->bits;
+        huffman_tables->curr_segment->curr_table += size;
         if (j <= ALPHA) {
           int local_max_bits = code_lengths[0];
           int k;
@@ -515,14 +520,13 @@ static int ReadHuffmanCodes(VP8LDecoder* const dec, int xsize, int ysize,
   hdr->huffman_image_ = huffman_image;
   hdr->num_htree_groups_ = num_htree_groups;
   hdr->htree_groups_ = htree_groups;
-  hdr->huffman_tables_ = huffman_tables;
 
  Error:
   WebPSafeFree(code_lengths);
   WebPSafeFree(mapping);
   if (!ok) {
     WebPSafeFree(huffman_image);
-    WebPSafeFree(huffman_tables);
+    VP8LHuffmanTablesDeallocate(huffman_tables);
     VP8LHtreeGroupsFree(htree_groups);
   }
   return ok;
@@ -1336,7 +1340,7 @@ static int ReadTransform(int* const xsize, int const* ysize,
        ok = ok && ExpandColorMap(num_colors, transform);
       break;
     }
-    case SUBTRACT_GREEN:
+    case SUBTRACT_GREEN_TRANSFORM:
       break;
     default:
       assert(0);    // can't happen
@@ -1358,7 +1362,7 @@ static void ClearMetadata(VP8LMetadata* const hdr) {
   assert(hdr != NULL);
 
   WebPSafeFree(hdr->huffman_image_);
-  WebPSafeFree(hdr->huffman_tables_);
+  VP8LHuffmanTablesDeallocate(&hdr->huffman_tables_);
   VP8LHtreeGroupsFree(hdr->htree_groups_);
   VP8LColorCacheClear(&hdr->color_cache_);
   VP8LColorCacheClear(&hdr->saved_color_cache_);
@@ -1673,7 +1677,7 @@ int VP8LDecodeImage(VP8LDecoder* const dec) {
 
   if (dec == NULL) return 0;
 
-  assert(dec->hdr_.huffman_tables_ != NULL);
+  assert(dec->hdr_.huffman_tables_.root.start != NULL);
   assert(dec->hdr_.htree_groups_ != NULL);
   assert(dec->hdr_.num_htree_groups_ > 0);
 

+ 1 - 1
Pods/libwebp/src/dec/vp8li_dec.h

@@ -51,7 +51,7 @@ typedef struct {
   uint32_t*       huffman_image_;
   int             num_htree_groups_;
   HTreeGroup*     htree_groups_;
-  HuffmanCode*    huffman_tables_;
+  HuffmanTables   huffman_tables_;
 } VP8LMetadata;
 
 typedef struct VP8LDecoder VP8LDecoder;

+ 21 - 14
Pods/libwebp/src/dec/webp_dec.c

@@ -179,7 +179,7 @@ static VP8StatusCode ParseOptionalChunks(const uint8_t** const data,
       return VP8_STATUS_BITSTREAM_ERROR;          // Not a valid chunk size.
     }
     // For odd-sized chunk-payload, there's one byte padding at the end.
-    disk_chunk_size = (CHUNK_HEADER_SIZE + chunk_size + 1) & ~1;
+    disk_chunk_size = (CHUNK_HEADER_SIZE + chunk_size + 1) & ~1u;
     total_size += disk_chunk_size;
 
     // Check that total bytes skipped so far does not exceed riff_size.
@@ -658,19 +658,26 @@ uint8_t* WebPDecodeBGRA(const uint8_t* data, size_t data_size,
 uint8_t* WebPDecodeYUV(const uint8_t* data, size_t data_size,
                        int* width, int* height, uint8_t** u, uint8_t** v,
                        int* stride, int* uv_stride) {
-  WebPDecBuffer output;   // only to preserve the side-infos
-  uint8_t* const out = Decode(MODE_YUV, data, data_size,
-                              width, height, &output);
-
-  if (out != NULL) {
-    const WebPYUVABuffer* const buf = &output.u.YUVA;
-    *u = buf->u;
-    *v = buf->v;
-    *stride = buf->y_stride;
-    *uv_stride = buf->u_stride;
-    assert(buf->u_stride == buf->v_stride);
-  }
-  return out;
+  // data, width and height are checked by Decode().
+  if (u == NULL || v == NULL || stride == NULL || uv_stride == NULL) {
+    return NULL;
+  }
+
+  {
+    WebPDecBuffer output;   // only to preserve the side-infos
+    uint8_t* const out = Decode(MODE_YUV, data, data_size,
+                                width, height, &output);
+
+    if (out != NULL) {
+      const WebPYUVABuffer* const buf = &output.u.YUVA;
+      *u = buf->u;
+      *v = buf->v;
+      *stride = buf->y_stride;
+      *uv_stride = buf->u_stride;
+      assert(buf->u_stride == buf->v_stride);
+    }
+    return out;
+  }
 }
 
 static void DefaultFeatures(WebPBitstreamFeatures* const features) {
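
The WebPDecodeYUV() hunk above makes the public entry point reject NULL output pointers up front instead of dereferencing them after a successful decode (per the added comment, data, width and height are already checked by Decode()). A minimal caller-side sketch, assuming only the public <webp/decode.h> API:

    #include <stdio.h>
    #include <webp/decode.h>

    static int PrintYuvLayout(const uint8_t* data, size_t data_size) {
      int width, height, stride, uv_stride;
      uint8_t* u;
      uint8_t* v;
      /* With this change, passing NULL for u, v, stride or uv_stride returns
       * NULL instead of crashing; well-formed calls behave as before.       */
      uint8_t* const y = WebPDecodeYUV(data, data_size, &width, &height,
                                       &u, &v, &stride, &uv_stride);
      if (y == NULL) return 0;        /* bad arguments or bad bitstream      */
      printf("Y: %dx%d, stride %d, UV stride %d\n",
             width, height, stride, uv_stride);
      WebPFree(y);                    /* Y/U/V planes share one allocation   */
      return 1;
    }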

+ 1 - 1
Pods/libwebp/src/demux/Makefile.am

@@ -13,6 +13,6 @@ noinst_HEADERS =
 noinst_HEADERS += ../webp/format_constants.h
 
 libwebpdemux_la_LIBADD = ../libwebp.la
-libwebpdemux_la_LDFLAGS = -no-undefined -version-info 2:11:0
+libwebpdemux_la_LDFLAGS = -no-undefined -version-info 2:14:0
 libwebpdemuxincludedir = $(includedir)/webp
 pkgconfig_DATA = libwebpdemux.pc

+ 2 - 2
Pods/libwebp/src/demux/demux.c

@@ -24,8 +24,8 @@
 #include "src/webp/format_constants.h"
 
 #define DMUX_MAJ_VERSION 1
-#define DMUX_MIN_VERSION 2
-#define DMUX_REV_VERSION 4
+#define DMUX_MIN_VERSION 3
+#define DMUX_REV_VERSION 2
 
 typedef struct {
   size_t start_;        // start location of the data

+ 2 - 2
Pods/libwebp/src/demux/libwebpdemux.pc.in

@@ -6,6 +6,6 @@ includedir=@includedir@
 Name: libwebpdemux
 Description: Library for parsing the WebP graphics format container
 Version: @PACKAGE_VERSION@
-Requires: libwebp >= 0.2.0
+Requires.private: libwebp >= 0.2.0
 Cflags: -I${includedir}
-Libs: -L${libdir} -lwebpdemux
+Libs: -L${libdir} -l@webp_libname_prefix@webpdemux

+ 5 - 5
Pods/libwebp/src/demux/libwebpdemux.rc

@@ -6,8 +6,8 @@
 LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US
 
 VS_VERSION_INFO VERSIONINFO
- FILEVERSION 1,0,2,4
- PRODUCTVERSION 1,0,2,4
+ FILEVERSION 1,0,3,2
+ PRODUCTVERSION 1,0,3,2
  FILEFLAGSMASK 0x3fL
 #ifdef _DEBUG
  FILEFLAGS 0x1L
@@ -24,12 +24,12 @@ BEGIN
         BEGIN
             VALUE "CompanyName", "Google, Inc."
             VALUE "FileDescription", "libwebpdemux DLL"
-            VALUE "FileVersion", "1.2.4"
+            VALUE "FileVersion", "1.3.2"
             VALUE "InternalName", "libwebpdemux.dll"
-            VALUE "LegalCopyright", "Copyright (C) 2022"
+            VALUE "LegalCopyright", "Copyright (C) 2023"
             VALUE "OriginalFilename", "libwebpdemux.dll"
             VALUE "ProductName", "WebP Image Demuxer"
-            VALUE "ProductVersion", "1.2.4"
+            VALUE "ProductVersion", "1.3.2"
         END
     END
     BLOCK "VarFileInfo"

+ 1 - 0
Pods/libwebp/src/dsp/alpha_processing.c

@@ -425,6 +425,7 @@ void (*WebPAlphaReplace)(uint32_t* src, int length, uint32_t color);
 //------------------------------------------------------------------------------
 // Init function
 
+extern VP8CPUInfo VP8GetCPUInfo;
 extern void WebPInitAlphaProcessingMIPSdspR2(void);
 extern void WebPInitAlphaProcessingSSE2(void);
 extern void WebPInitAlphaProcessingSSE41(void);
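
The one-line addition above (repeated in cost.c further down) declares the VP8GetCPUInfo hook locally in each dsp file that consults it, rather than picking the declaration up from a shared header; my reading is that this narrows where the symbol is exposed without changing the runtime-dispatch scheme. The sketch below is a self-contained illustration of that scheme with stand-in names, not libwebp's actual cpu.h:

    #include <stddef.h>

    typedef enum { kSimdSSE2, kSimdNEON } SimdFeature;
    typedef int (*CpuInfoFunc)(SimdFeature feature);

    /* --- cpu.c: the single definition of the probe hook ----------------- */
    static int DefaultCpuProbe(SimdFeature feature) { (void)feature; return 0; }
    CpuInfoFunc GetCpuInfo = DefaultCpuProbe;

    /* --- alpha_processing.c: local extern, as in this diff -------------- */
    extern CpuInfoFunc GetCpuInfo;

    static void ApplyAlpha_C(unsigned char* rgba, int len) {
      (void)rgba; (void)len;          /* portable fallback implementation    */
    }

    void (*ApplyAlpha)(unsigned char* rgba, int len) = NULL;

    void InitAlphaProcessing(void) {
      ApplyAlpha = ApplyAlpha_C;      /* always install the C version first  */
      if (GetCpuInfo != NULL && GetCpuInfo(kSimdSSE2)) {
        /* swap in the SSE2 variant here */
      }
    }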

+ 6 - 6
Pods/libwebp/src/dsp/alpha_processing_sse2.c

@@ -26,8 +26,8 @@ static int DispatchAlpha_SSE2(const uint8_t* WEBP_RESTRICT alpha,
   uint32_t alpha_and = 0xff;
   int i, j;
   const __m128i zero = _mm_setzero_si128();
-  const __m128i rgb_mask = _mm_set1_epi32(0xffffff00u);  // to preserve RGB
-  const __m128i all_0xff = _mm_set_epi32(0, 0, ~0u, ~0u);
+  const __m128i rgb_mask = _mm_set1_epi32((int)0xffffff00);  // to preserve RGB
+  const __m128i all_0xff = _mm_set_epi32(0, 0, ~0, ~0);
   __m128i all_alphas = all_0xff;
 
   // We must be able to access 3 extra bytes after the last written byte
@@ -106,8 +106,8 @@ static int ExtractAlpha_SSE2(const uint8_t* WEBP_RESTRICT argb, int argb_stride,
   // value is not 0xff if any of the alpha[] is not equal to 0xff.
   uint32_t alpha_and = 0xff;
   int i, j;
-  const __m128i a_mask = _mm_set1_epi32(0xffu);  // to preserve alpha
-  const __m128i all_0xff = _mm_set_epi32(0, 0, ~0u, ~0u);
+  const __m128i a_mask = _mm_set1_epi32(0xff);  // to preserve alpha
+  const __m128i all_0xff = _mm_set_epi32(0, 0, ~0, ~0);
   __m128i all_alphas = all_0xff;
 
   // We must be able to access 3 extra bytes after the last written byte
@@ -178,7 +178,7 @@ static int ExtractAlpha_SSE2(const uint8_t* WEBP_RESTRICT argb, int argb_stride,
 static void ApplyAlphaMultiply_SSE2(uint8_t* rgba, int alpha_first,
                                     int w, int h, int stride) {
   const __m128i zero = _mm_setzero_si128();
-  const __m128i kMult = _mm_set1_epi16(0x8081u);
+  const __m128i kMult = _mm_set1_epi16((short)0x8081);
   const __m128i kMask = _mm_set_epi16(0, 0xff, 0xff, 0, 0, 0xff, 0xff, 0);
   const int kSpan = 4;
   while (h-- > 0) {
@@ -267,7 +267,7 @@ static int HasAlpha32b_SSE2(const uint8_t* src, int length) {
 }
 
 static void AlphaReplace_SSE2(uint32_t* src, int length, uint32_t color) {
-  const __m128i m_color = _mm_set1_epi32(color);
+  const __m128i m_color = _mm_set1_epi32((int)color);
   const __m128i zero = _mm_setzero_si128();
   int i = 0;
   for (; i + 8 <= length; i += 8) {
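
The casts in the SSE2/SSE4.1 hunks look like warning hygiene rather than behaviour changes: _mm_set1_epi32(), _mm_set_epi32() and _mm_set1_epi16() take signed arguments, so constants with the high bit set (0xffffff00, 0x8081, ~0u, or a uint32_t color) are now converted explicitly instead of implicitly. A small self-contained check (not libwebp code; x86 with SSE2 assumed):

    #include <emmintrin.h>
    #include <stdint.h>
    #include <stdio.h>

    int main(void) {
      /* Explicit casts keep the exact bit patterns; only the diagnostics
       * about implicit unsigned-to-signed conversion go away.              */
      const __m128i rgb_mask = _mm_set1_epi32((int)0xffffff00);
      const __m128i k_mult = _mm_set1_epi16((short)0x8081);
      uint32_t lanes[4];
      _mm_storeu_si128((__m128i*)lanes, rgb_mask);
      printf("lane0 = 0x%08x\n", (unsigned)lanes[0]);   /* 0xffffff00 */
      (void)k_mult;
      return 0;
    }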

+ 1 - 1
Pods/libwebp/src/dsp/alpha_processing_sse41.c

@@ -26,7 +26,7 @@ static int ExtractAlpha_SSE41(const uint8_t* WEBP_RESTRICT argb,
   // value is not 0xff if any of the alpha[] is not equal to 0xff.
   uint32_t alpha_and = 0xff;
   int i, j;
-  const __m128i all_0xff = _mm_set1_epi32(~0u);
+  const __m128i all_0xff = _mm_set1_epi32(~0);
   __m128i all_alphas = all_0xff;
 
   // We must be able to access 3 extra bytes after the last written byte

+ 1 - 0
Pods/libwebp/src/dsp/cost.c

@@ -374,6 +374,7 @@ static void SetResidualCoeffs_C(const int16_t* const coeffs,
 VP8GetResidualCostFunc VP8GetResidualCost;
 VP8SetResidualCoeffsFunc VP8SetResidualCoeffs;
 
+extern VP8CPUInfo VP8GetCPUInfo;
 extern void VP8EncDspCostInitMIPS32(void);
 extern void VP8EncDspCostInitMIPSdspR2(void);
 extern void VP8EncDspCostInitSSE2(void);

+ 2 - 2
Pods/libwebp/src/dsp/cost_neon.c

@@ -29,7 +29,7 @@ static void SetResidualCoeffs_NEON(const int16_t* const coeffs,
   const uint8x16_t eob = vcombine_u8(vqmovn_u16(eob_0), vqmovn_u16(eob_1));
   const uint8x16_t masked = vandq_u8(eob, vld1q_u8(position));
 
-#ifdef __aarch64__
+#if WEBP_AARCH64
   res->last = vmaxvq_u8(masked) - 1;
 #else
   const uint8x8_t eob_8x8 = vmax_u8(vget_low_u8(masked), vget_high_u8(masked));
@@ -43,7 +43,7 @@ static void SetResidualCoeffs_NEON(const int16_t* const coeffs,
 
   vst1_lane_s32(&res->last, vreinterpret_s32_u32(eob_32x2), 0);
   --res->last;
-#endif  // __aarch64__
+#endif  // WEBP_AARCH64
 
   res->coeffs = coeffs;
 }
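
The cost_neon.c hunks swap the compiler-specific __aarch64__ test for the library-wide WEBP_AARCH64 macro; as I understand it, that macro also covers MSVC's _M_ARM64 spelling, so the vmaxvq_u8() fast path can be taken on Windows-on-Arm builds too. An illustrative stand-alone version of such a switch (MY_AARCH64 is a made-up name, not libwebp's definition):

    #include <stdio.h>

    /* Fold the GCC/Clang and MSVC predefined macros for 64-bit Arm into a
     * single 0/1 switch that #if can test directly.                        */
    #if defined(__aarch64__) || defined(_M_ARM64)
    #define MY_AARCH64 1
    #else
    #define MY_AARCH64 0
    #endif

    int main(void) {
    #if MY_AARCH64
      puts("64-bit Arm: AArch64-only intrinsics such as vmaxvq_u8() available");
    #else
      puts("not 64-bit Arm: take the pairwise-max fallback path");
    #endif
      return 0;
    }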
