Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
53 changes: 50 additions & 3 deletions backends/apple/coreml/runtime/delegate/backend_delegate.mm
Original file line number Diff line number Diff line change
Expand Up @@ -127,6 +127,7 @@ - (BOOL)purgeModelsCacheAndReturnError:(NSError * _Nullable __autoreleasing *)er
@property (assign, readonly, nonatomic) BackendDelegate::Config config;
@property (strong, readonly, nonatomic) dispatch_queue_t syncQueue;
@property (strong, nonatomic, nullable) ETCoreMLModelManager *impl;
@property (strong, nonatomic, nullable) ETCoreMLModelCache *defaultCache;
@property (assign, readonly, nonatomic) BOOL isAvailable;

@end
Expand Down Expand Up @@ -165,6 +166,16 @@ - (BOOL)_loadAndReturnError:(NSError * _Nullable __autoreleasing *)error {

self.impl = modelManager;

// Create default filesystem cache at the same location as assets
NSURL *defaultCacheURL = [NSURL fileURLWithPath:ETCoreMLStrings.assetsDirectoryPath isDirectory:YES];
ETCoreMLModelCache *defaultCache = [[ETCoreMLModelCache alloc] initWithCacheRootDirectory:defaultCacheURL];
if (defaultCache.isReady) {
self.defaultCache = defaultCache;
} else {
ETCoreMLLogError(defaultCache.initializationError,
"Default cache initialization failed, will use asset manager as fallback");
}

if (self.config.should_prewarm_asset) {
[modelManager prewarmRecentlyUsedAssetsWithMaxCount:1];
}
Expand Down Expand Up @@ -199,6 +210,7 @@ - (ModelHandle*)loadModelFromAOTData:(NSData*)data
configuration:configuration
methodName:nil
functionName:nil
cachePath:nil
error:error];
}

Expand All @@ -221,6 +233,23 @@ - (ModelHandle*)loadModelFromAOTData:(NSData*)data
functionName:(nullable NSString*)functionName
cachePath:(nullable NSString*)cachePath
error:(NSError* __autoreleasing*)error {
// Default to using the old cache (useNewCache = NO)
return [self loadModelFromAOTData:data
configuration:configuration
methodName:methodName
functionName:functionName
cachePath:cachePath
useNewCache:NO
error:error];
}

- (ModelHandle*)loadModelFromAOTData:(NSData*)data
configuration:(MLModelConfiguration*)configuration
methodName:(nullable NSString*)methodName
functionName:(nullable NSString*)functionName
cachePath:(nullable NSString*)cachePath
useNewCache:(BOOL)useNewCache
error:(NSError* __autoreleasing*)error {
if (![self loadAndReturnError:error]) {
return nil;
}
Expand All @@ -240,8 +269,21 @@ - (ModelHandle*)loadModelFromAOTData:(NSData*)data
return nil;
}
cache = modelCache;
} else if (useNewCache) {
if (self.defaultCache != nil) {
// Use default filesystem cache
cache = self.defaultCache;
} else {
// Fallback: useNewCache requested but default cache unavailable
NSError *fallbackError = [NSError errorWithDomain:ETCoreMLErrorDomain
code:ETCoreMLErrorInternalError
userInfo:@{NSLocalizedDescriptionKey: @"Default cache unavailable"}];
ETCoreMLLogError(fallbackError,
"useNewCache=YES but default cache is unavailable, falling back to asset manager");
}
}
// cache == nil means loadModelFromAOTData will use self.cache (default cache)
// If useNewCache is false or defaultCache is nil, cache remains nil
// and loadModelFromAOTData will use the asset manager path

auto handle = [self.impl loadModelFromAOTData:data
configuration:configuration
Expand Down Expand Up @@ -346,15 +388,19 @@ explicit BackendDelegateImpl(const Config& config) noexcept
NSString *methodNameStr = method_name ? @(method_name) : nil;
NSString *functionNameStr = function_name ? @(function_name) : nil;

// Parse cache_dir from runtime_specs
// Parse cache_dir and _use_new_cache from runtime_specs
NSString *cachePath = nil;
BOOL useNewCache = NO; // Default to using the old cache (asset manager)
for (size_t i = 0; i < runtime_specs.size(); ++i) {
const auto& opt = runtime_specs[i];
if (std::strcmp(opt.key, "cache_dir") == 0) {
if (auto* arr = std::get_if<std::array<char, executorch::runtime::kMaxOptionValueLength>>(&opt.value)) {
cachePath = @(arr->data());
}
break;
} else if (std::strcmp(opt.key, "_use_new_cache") == 0) {
if (auto* val = std::get_if<bool>(&opt.value)) {
useNewCache = *val ? YES : NO;
}
}
}

Expand All @@ -366,6 +412,7 @@ explicit BackendDelegateImpl(const Config& config) noexcept
methodName:methodNameStr
functionName:functionNameStr
cachePath:cachePath
useNewCache:useNewCache
error:&localError];
if (localError != nil) {
ETCoreMLLogError(localError, "Model init failed");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,21 @@ class LoadOptionsBuilder {
return *this;
}

/**
 * Selects between the legacy asset-manager cache and the new
 * filesystem-backed cache (ETCoreMLModelCache).
 *
 * NOTE: temporary knob for A/B testing the new cache implementation;
 * it will be deleted once the new cache is fully rolled out.
 *
 * @param enabled Pass true to opt into the new filesystem cache; pass
 *                false (the default behavior) to keep the legacy asset
 *                manager.
 * @return Reference to this builder, so calls can be chained.
 */
LoadOptionsBuilder& setUseNewCache(bool enabled) {
    options_.set_option("_use_new_cache", enabled);
    return *this;
}

/**
* Returns the backend identifier for this options builder.
*/
Expand Down
118 changes: 118 additions & 0 deletions backends/apple/coreml/runtime/test/coreml_backend_options_test.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -221,3 +221,121 @@ TEST_F(CoreMLBackendOptionsTest, IntegrationWithOptionsMapCacheDir) {
}
EXPECT_TRUE(found_cache_dir) << "cache_dir option not found";
}

// Test setUseNewCache with true
// Enabling the new cache must record exactly one option:
// key "_use_new_cache" holding boolean true.
TEST_F(CoreMLBackendOptionsTest, SetUseNewCacheTrue) {
    LoadOptionsBuilder opts_builder;
    opts_builder.setUseNewCache(true);

    const auto opts = opts_builder.view();
    EXPECT_EQ(opts.size(), 1);
    EXPECT_STREQ(opts[0].key, "_use_new_cache");

    const bool* flag = std::get_if<bool>(&opts[0].value);
    if (flag == nullptr) {
        FAIL() << "Expected bool value for _use_new_cache";
    } else {
        EXPECT_TRUE(*flag);
    }
}

// Test setUseNewCache with false
// Explicitly disabling the new cache still records the option,
// with key "_use_new_cache" holding boolean false.
TEST_F(CoreMLBackendOptionsTest, SetUseNewCacheFalse) {
    LoadOptionsBuilder opts_builder;
    opts_builder.setUseNewCache(false);

    const auto opts = opts_builder.view();
    EXPECT_EQ(opts.size(), 1);
    EXPECT_STREQ(opts[0].key, "_use_new_cache");

    const bool* flag = std::get_if<bool>(&opts[0].value);
    if (flag == nullptr) {
        FAIL() << "Expected bool value for _use_new_cache";
    } else {
        EXPECT_FALSE(*flag);
    }
}

// Test setUseNewCache method chaining
// setUseNewCache must return a reference to the same builder instance
// so calls can be fluently chained.
TEST_F(CoreMLBackendOptionsTest, SetUseNewCacheChaining) {
    LoadOptionsBuilder opts_builder;
    LoadOptionsBuilder& returned = opts_builder.setUseNewCache(true);
    EXPECT_EQ(&returned, &opts_builder);
}

// Test combining setComputeUnit, setCacheDirectory, and setUseNewCache
// Chaining setComputeUnit, setCacheDirectory, and setUseNewCache must
// produce three distinct options, each with the expected key and value.
TEST_F(CoreMLBackendOptionsTest, AllOptionsCombined) {
    LoadOptionsBuilder opts_builder;
    opts_builder.setComputeUnit(LoadOptionsBuilder::ComputeUnit::CPU_AND_GPU)
        .setCacheDirectory("/path/to/cache")
        .setUseNewCache(true);

    const auto opts = opts_builder.view();
    EXPECT_EQ(opts.size(), 3);

    // Track which of the three expected keys were observed.
    bool saw_compute_unit = false;
    bool saw_cache_dir = false;
    bool saw_use_new_cache = false;

    for (size_t idx = 0; idx < opts.size(); ++idx) {
        const char* key = opts[idx].key;
        if (std::strcmp(key, "compute_unit") == 0) {
            saw_compute_unit = true;
            const auto* str_val =
                std::get_if<std::array<char, kMaxOptionValueLength>>(&opts[idx].value);
            if (str_val != nullptr) {
                EXPECT_STREQ(str_val->data(), "cpu_and_gpu");
            }
        } else if (std::strcmp(key, "cache_dir") == 0) {
            saw_cache_dir = true;
            const auto* str_val =
                std::get_if<std::array<char, kMaxOptionValueLength>>(&opts[idx].value);
            if (str_val != nullptr) {
                EXPECT_STREQ(str_val->data(), "/path/to/cache");
            }
        } else if (std::strcmp(key, "_use_new_cache") == 0) {
            saw_use_new_cache = true;
            const bool* flag = std::get_if<bool>(&opts[idx].value);
            if (flag != nullptr) {
                EXPECT_TRUE(*flag);
            }
        }
    }

    EXPECT_TRUE(saw_compute_unit) << "compute_unit option not found";
    EXPECT_TRUE(saw_cache_dir) << "cache_dir option not found";
    EXPECT_TRUE(saw_use_new_cache) << "_use_new_cache option not found";
}

// Test integration with LoadBackendOptionsMap including _use_new_cache
// End-to-end: an _use_new_cache option set through the builder must survive
// a round trip through LoadBackendOptionsMap keyed by "CoreMLBackend".
TEST_F(CoreMLBackendOptionsTest, IntegrationWithOptionsMapUseNewCache) {
    LoadOptionsBuilder backend_opts;
    backend_opts.setUseNewCache(true);

    LoadBackendOptionsMap options_map;
    EXPECT_EQ(options_map.set_options(backend_opts), Error::Ok);

    EXPECT_EQ(options_map.size(), 1);
    EXPECT_TRUE(options_map.has_options("CoreMLBackend"));

    const auto round_tripped = options_map.get_options("CoreMLBackend");
    EXPECT_EQ(round_tripped.size(), 1);
    EXPECT_STREQ(round_tripped[0].key, "_use_new_cache");

    const bool* flag = std::get_if<bool>(&round_tripped[0].value);
    if (flag == nullptr) {
        FAIL() << "Expected bool value for _use_new_cache";
    } else {
        EXPECT_TRUE(*flag);
    }
}

// Test setUseNewCache updates when called multiple times
// Calling setUseNewCache repeatedly must overwrite the stored value
// rather than append a duplicate entry; the last call wins.
TEST_F(CoreMLBackendOptionsTest, SetUseNewCacheMultipleTimes) {
    LoadOptionsBuilder opts_builder;
    opts_builder.setUseNewCache(true);
    opts_builder.setUseNewCache(false);

    const auto opts = opts_builder.view();
    EXPECT_EQ(opts.size(), 1);

    const bool* flag = std::get_if<bool>(&opts[0].value);
    if (flag == nullptr) {
        FAIL() << "Expected bool value for _use_new_cache";
    } else {
        // Last value wins.
        EXPECT_FALSE(*flag);
    }
}
Loading