I am having the same issue with a paid capacity.
I have the Storage Blob Data Contributor, Contributor, and Fabric Capacity Administrator roles.
I created a new workspace and an empty Lakehouse, and when I try to create an empty table, the notebook gives me the following error (the cell that triggers it is at the end of this post):
Request failed: HTTP/1.1 403 Forbidden
  com.microsoft.fabric.spark.metadata.Helpers$.executeRequest(Helpers.scala:154)
  com.microsoft.fabric.platform.PbiPlatformClient.newGetRequest(PbiPlatformClient.scala:51)
  com.microsoft.fabric.platform.PbiPlatformClient.newGetRequest$(PbiPlatformClient.scala:47)
  com.microsoft.fabric.platform.PbiPlatformInternalApiClient.newGetRequest(PbiPlatformClient.scala:175)
  com.microsoft.fabric.platform.PbiPlatformInternalApiClient.getAllWorkspaces(PbiPlatformClient.scala:199)
  com.microsoft.fabric.platform.InstrumentedPbiPlatformClient.$anonfun$getAllWorkspaces$1(PbiPlatformClient.scala:164)
  com.microsoft.fabric.spark.metadata.Helpers$.timed(Helpers.scala:29)
  com.microsoft.fabric.platform.InstrumentedPbiPlatformClient.getAllWorkspaces(PbiPlatformClient.scala:164)
  com.microsoft.fabric.platform.PbiPlatformCachingClient.$anonfun$workspaceCache$1(PbiPlatformClient.scala:117)
  com.google.common.base.Suppliers$ExpiringMemoizingSupplier.get(Suppliers.java:192)
  com.microsoft.fabric.platform.PbiPlatformCachingClient.getWorkspace(PbiPlatformClient.scala:146)
  com.microsoft.fabric.platform.PbiPlatformCachingClient.getArtifacts(PbiPlatformClient.scala:136)
  com.microsoft.fabric.platform.PbiPlatformCachingClient.$anonfun$artifactCache$1(PbiPlatformClient.scala:130)
  com.github.benmanes.caffeine.cache.LocalLoadingCache.lambda$newMappingFunction$2(LocalLoadingCache.java:145)
  com.github.benmanes.caffeine.cache.BoundedLocalCache.lambda$doComputeIfAbsent$14(BoundedLocalCache.java:2406)
  java.base/java.util.concurrent.ConcurrentHashMap.compute(ConcurrentHashMap.java:1908)
  com.github.benmanes.caffeine.cache.BoundedLocalCache.doComputeIfAbsent(BoundedLocalCache.java:2404)
  com.github.benmanes.caffeine.cache.BoundedLocalCache.computeIfAbsent(BoundedLocalCache.java:2387)
  com.github.benmanes.caffeine.cache.LocalCache.computeIfAbsent(LocalCache.java:108)
  com.github.benmanes.caffeine.cache.LocalLoadingCache.get(LocalLoadingCache.java:56)
  com.microsoft.fabric.platform.PbiPlatformCachingClient.getArtifact(PbiPlatformClient.scala:151)
  com.microsoft.fabric.spark.metadata.SchemaPathResolver.getArtifactRoot(pathResolvers.scala:127)
  com.microsoft.fabric.spark.metadata.SchemaPathResolver.getSchemaRoot(pathResolvers.scala:144)
  com.microsoft.fabric.spark.metadata.DefaultSchemaMetadataManager.listSchemas(DefaultSchemaMetadataManager.scala:218)
  com.microsoft.fabric.spark.metadata.DefaultSchemaMetadataManager.$anonfun$defaultSchemaPathResolver$1(DefaultSchemaMetadataManager.scala:30)
  com.microsoft.fabric.spark.metadata.NamespaceResolver.$anonfun$decodedSchemaNameCache$1(pathResolvers.scala:46)
  com.github.benmanes.caffeine.cache.LocalLoadingCache.lambda$newMappingFunction$2(LocalLoadingCache.java:145)
  com.github.benmanes.caffeine.cache.BoundedLocalCache.lambda$doComputeIfAbsent$14(BoundedLocalCache.java:2406)
  java.base/java.util.concurrent.ConcurrentHashMap.compute(ConcurrentHashMap.java:1908)
  com.github.benmanes.caffeine.cache.BoundedLocalCache.doComputeIfAbsent(BoundedLocalCache.java:2404)
  com.github.benmanes.caffeine.cache.BoundedLocalCache.computeIfAbsent(BoundedLocalCache.java:2387)
  com.github.benmanes.caffeine.cache.LocalCache.computeIfAbsent(LocalCache.java:108)
  com.github.benmanes.caffeine.cache.LocalLoadingCache.get(LocalLoadingCache.java:56)
  com.microsoft.fabric.spark.metadata.Helpers$.forceLoadIfRequiredInCachedMap(Helpers.scala:61)
  com.microsoft.fabric.spark.metadata.NamespaceResolver.inferNamespace(pathResolvers.scala:87)
  com.microsoft.fabric.spark.metadata.NamespaceResolver.$anonfun$toNamespace$1(pathResolvers.scala:79)
  java.base/java.util.concurrent.ConcurrentHashMap.computeIfAbsent(ConcurrentHashMap.java:1705)
  com.microsoft.fabric.spark.metadata.NamespaceResolver.toNamespace(pathResolvers.scala:79)
  com.microsoft.fabric.spark.metadata.DefaultSchemaMetadataManager.getSchema(DefaultSchemaMetadataManager.scala:73)
  com.microsoft.fabric.spark.metadata.MetadataManager.getSchema(MetadataManager.scala:192)
  com.microsoft.fabric.spark.metadata.InstrumentedMetadataManager.super$getSchema(MetadataManager.scala:321)
  com.microsoft.fabric.spark.metadata.InstrumentedMetadataManager.$anonfun$getSchema$1(MetadataManager.scala:321)
  com.microsoft.fabric.spark.metadata.Helpers$.timed(Helpers.scala:29)
  com.microsoft.fabric.spark.metadata.InstrumentedMetadataManager.getSchema(MetadataManager.scala:321)
  com.microsoft.fabric.spark.catalog.OnelakeExternalCatalog.getDatabase(OnelakeExternalCatalog.scala:78)
  com.microsoft.fabric.spark.catalog.OnelakeExternalCatalog.databaseExists(OnelakeExternalCatalog.scala:84)
  com.microsoft.fabric.spark.catalog.InstrumentedExternalCatalog.$anonfun$databaseExists$1(OnelakeExternalCatalog.scala:417)
  scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
  com.microsoft.fabric.spark.metadata.Helpers$.timed(Helpers.scala:29)
  com.microsoft.fabric.spark.catalog.InstrumentedExternalCatalog.databaseExists(OnelakeExternalCatalog.scala:417)
  org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:169)
  org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:142)
  org.apache.spark.sql.hive.HiveSessionStateBuilder.externalCatalog(HiveSessionStateBuilder.scala:54)
  org.apache.spark.sql.hive.HiveSessionStateBuilder.$anonfun$catalog$1(HiveSessionStateBuilder.scala:69)
  org.apache.spark.sql.catalyst.catalog.SessionCatalog.externalCatalog$lzycompute(SessionCatalog.scala:140)
  org.apache.spark.sql.catalyst.catalog.SessionCatalog.externalCatalog(SessionCatalog.scala:140)
  org.apache.spark.sql.catalyst.catalog.SessionCatalog.databaseExists(SessionCatalog.scala:363)
  org.apache.spark.sql.catalyst.catalog.SessionCatalog.requireDbExists(SessionCatalog.scala:285)
  org.apache.spark.sql.catalyst.catalog.SessionCatalog.getTableRawMetadata(SessionCatalog.scala:622)
  org.apache.spark.sql.catalyst.catalog.SessionCatalog.getTableMetadata(SessionCatalog.scala:606)
  org.apache.spark.sql.execution.datasources.v2.V2SessionCatalog.loadTable(V2SessionCatalog.scala:80)
  org.apache.spark.sql.connector.catalog.TableCatalog.tableExists(TableCatalog.java:163)
  org.apache.spark.sql.execution.datasources.v2.V2SessionCatalog.tableExists(V2SessionCatalog.scala:45)
  org.apache.spark.sql.connector.catalog.DelegatingCatalogExtension.tableExists(DelegatingCatalogExtension.java:93)
  org.apache.spark.sql.delta.catalog.DeltaCatalog.org$apache$spark$sql$delta$catalog$SupportsPathIdentifier$$super$tableExists(DeltaCatalog.scala:57)
  org.apache.spark.sql.delta.catalog.SupportsPathIdentifier.$anonfun$tableExists$1(DeltaCatalog.scala:739)
  scala.runtime.java8.JFunction0$mcZ$sp.apply(JFunction0$mcZ$sp.java:23)
  org.apache.spark.sql.delta.metering.DeltaLogging.recordFrameProfile(DeltaLogging.scala:141)
  org.apache.spark.sql.delta.metering.DeltaLogging.recordFrameProfile$(DeltaLogging.scala:139)
  org.apache.spark.sql.delta.catalog.DeltaCatalog.recordFrameProfile(DeltaCatalog.scala:57)
  org.apache.spark.sql.delta.catalog.SupportsPathIdentifier.tableExists(DeltaCatalog.scala:732)
  org.apache.spark.sql.delta.catalog.SupportsPathIdentifier.tableExists$(DeltaCatalog.scala:730)
  org.apache.spark.sql.delta.catalog.DeltaCatalog.tableExists(DeltaCatalog.scala:57)
  org.apache.spark.sql.execution.datasources.v2.CreateTableExec.run(CreateTableExec.scala:44)
  org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result$lzycompute(V2CommandExec.scala:43)
  org.apache.spark.sql.execution.datasources.v2.V2CommandExec.result(V2CommandExec.scala:43)
  org.apache.spark.sql.execution.datasources.v2.V2CommandExec.executeCollect(V2CommandExec.scala:49)
  org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:152)
  org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:125)
  org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:214)
  org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:100)
  org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:827)
  org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:67)
  org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:152)
  org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:145)
  org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:512)
  org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:104)
  org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:512)
  org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:32)
  org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
  org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
  org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:32)
  org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:32)
  org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:488)
  org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:145)
  org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:129)
  org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:123)
  org.apache.spark.sql.Dataset.<init>(Dataset.scala:229)
  org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:99)
  org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:827)
  org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:96)
  org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:640)
  org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:827)
  org.apache.spark.sql.SparkSession.sql(SparkSession.scala:630)
  org.apache.spark.sql.SparkSession.sql(SparkSession.scala:671)
  org.apache.livy.repl.SQLInterpreter.execute(SQLInterpreter.scala:163)
  org.apache.livy.repl.Session.$anonfun$executeCode$1(Session.scala:868)
  scala.Option.map(Option.scala:230)
  org.apache.livy.repl.Session.executeCode(Session.scala:865)
  org.apache.livy.repl.Session.$anonfun$execute$10(Session.scala:569)
  org.apache.livy.repl.Session.withRealtimeOutputSupport(Session.scala:1094)
  org.apache.livy.repl.Session.$anonfun$execute$3(Session.scala:569)
  scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
  scala.concurrent.Future$.$anonfun$apply$1(Future.scala:659)
  scala.util.Success.$anonfun$map$1(Try.scala:255)
  scala.util.Success.map(Try.scala:213)
  scala.concurrent.Future.$anonfun$map$1(Future.scala:292)
  scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33)
  scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)
  scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
  java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
  java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
  java.base/java.lang.Thread.run(Thread.java:829)
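For reference, the cell that fails is nothing exotic; a minimal sketch of what I'm running is below. The table name and schema are just placeholders, and spark is the SparkSession that Fabric notebooks provide by default.

    # Minimal repro in a Fabric PySpark notebook; "test_table" is a placeholder.
    # Judging by the trace above, the 403 is raised before the table is created,
    # while Spark resolves the Lakehouse catalog (the getAllWorkspaces call).
    spark.sql("CREATE TABLE IF NOT EXISTS test_table (id INT) USING DELTA")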