Viewing docs for AWS v7.22.0
published on Wednesday, Mar 11, 2026 by Pulumi
Data source for managing AWS Bedrock Inference Profiles.
Example Usage
Basic Usage
// List all Bedrock inference profiles available in the current region.
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const test = aws.bedrock.getInferenceProfiles({});
# List all Bedrock inference profiles available in the current region.
import pulumi
import pulumi_aws as aws
test = aws.bedrock.get_inference_profiles()
package main
import (
"github.com/pulumi/pulumi-aws/sdk/v7/go/aws/bedrock"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
// List all Bedrock inference profiles available in the current region.
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
// Invoke the data source with no filter arguments; a failure aborts the deployment.
_, err := bedrock.GetInferenceProfiles(ctx, &bedrock.GetInferenceProfilesArgs{}, nil)
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;
// List all Bedrock inference profiles available in the current region.
return await Deployment.RunAsync(() =>
{
var test = Aws.Bedrock.GetInferenceProfiles.Invoke();
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.bedrock.BedrockFunctions;
import com.pulumi.aws.bedrock.inputs.GetInferenceProfilesArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
// List all Bedrock inference profiles available in the current region.
public static void stack(Context ctx) {
final var test = BedrockFunctions.getInferenceProfiles(GetInferenceProfilesArgs.builder()
.build());
}
}
# List all Bedrock inference profiles available in the current region.
variables:
test:
fn::invoke:
function: aws:bedrock:getInferenceProfiles
arguments: {}
Filter by Type
// Return only user-created (application) inference profiles.
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const test = aws.bedrock.getInferenceProfiles({
type: "APPLICATION",
});
# Return only user-created (application) inference profiles.
import pulumi
import pulumi_aws as aws
test = aws.bedrock.get_inference_profiles(type="APPLICATION")
package main
import (
"github.com/pulumi/pulumi-aws/sdk/v7/go/aws/bedrock"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
// Return only user-created (application) inference profiles.
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
// Type is an optional argument, so it is passed as a *string via pulumi.StringRef.
_, err := bedrock.GetInferenceProfiles(ctx, &bedrock.GetInferenceProfilesArgs{
Type: pulumi.StringRef("APPLICATION"),
}, nil)
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;
// Return only user-created (application) inference profiles.
return await Deployment.RunAsync(() =>
{
var test = Aws.Bedrock.GetInferenceProfiles.Invoke(new()
{
Type = "APPLICATION",
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.bedrock.BedrockFunctions;
import com.pulumi.aws.bedrock.inputs.GetInferenceProfilesArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
// Return only user-created (application) inference profiles.
public static void stack(Context ctx) {
final var test = BedrockFunctions.getInferenceProfiles(GetInferenceProfilesArgs.builder()
.type("APPLICATION")
.build());
}
}
# Return only user-created (application) inference profiles.
variables:
test:
fn::invoke:
function: aws:bedrock:getInferenceProfiles
arguments:
type: APPLICATION
Using getInferenceProfiles
Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.
function getInferenceProfiles(args: GetInferenceProfilesArgs, opts?: InvokeOptions): Promise<GetInferenceProfilesResult>
function getInferenceProfilesOutput(args: GetInferenceProfilesOutputArgs, opts?: InvokeOptions): Output<GetInferenceProfilesResult>

def get_inference_profiles(region: Optional[str] = None,
                           type: Optional[str] = None,
                           opts: Optional[InvokeOptions] = None) -> GetInferenceProfilesResult
def get_inference_profiles_output(region: Optional[pulumi.Input[str]] = None,
                                  type: Optional[pulumi.Input[str]] = None,
                                  opts: Optional[InvokeOptions] = None) -> Output[GetInferenceProfilesResult]

func GetInferenceProfiles(ctx *Context, args *GetInferenceProfilesArgs, opts ...InvokeOption) (*GetInferenceProfilesResult, error)
func GetInferenceProfilesOutput(ctx *Context, args *GetInferenceProfilesOutputArgs, opts ...InvokeOption) GetInferenceProfilesResultOutput

> Note: This function is named GetInferenceProfiles in the Go SDK.
public static class GetInferenceProfiles
{
    public static Task<GetInferenceProfilesResult> InvokeAsync(GetInferenceProfilesArgs args, InvokeOptions? opts = null)
    public static Output<GetInferenceProfilesResult> Invoke(GetInferenceProfilesInvokeArgs args, InvokeOptions? opts = null)
}

public static CompletableFuture<GetInferenceProfilesResult> getInferenceProfiles(GetInferenceProfilesArgs args, InvokeOptions options)
public static Output<GetInferenceProfilesResult> getInferenceProfiles(GetInferenceProfilesArgs args, InvokeOptions options)
fn::invoke:
function: aws:bedrock/getInferenceProfiles:getInferenceProfiles
arguments:
    # arguments dictionary

The following arguments are supported:
getInferenceProfiles Result
The following output properties are available:
- Id string
  - The provider-assigned unique ID for this managed resource.
- InferenceProfileSummaries List&lt;GetInferenceProfilesInferenceProfileSummary&gt;
  - List of inference profile summary objects. See inference_profile_summaries.
- Region string
- Type string
  - Type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock. APPLICATION means the inference profile was created by a user.

- Id string
  - The provider-assigned unique ID for this managed resource.
- InferenceProfileSummaries []GetInferenceProfilesInferenceProfileSummary
  - List of inference profile summary objects. See inference_profile_summaries.
- Region string
- Type string
  - Type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock. APPLICATION means the inference profile was created by a user.

- id String
  - The provider-assigned unique ID for this managed resource.
- inferenceProfileSummaries List&lt;GetInferenceProfilesInferenceProfileSummary&gt;
  - List of inference profile summary objects. See inference_profile_summaries.
- region String
- type String
  - Type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock. APPLICATION means the inference profile was created by a user.

- id string
  - The provider-assigned unique ID for this managed resource.
- inferenceProfileSummaries GetInferenceProfilesInferenceProfileSummary[]
  - List of inference profile summary objects. See inference_profile_summaries.
- region string
- type string
  - Type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock. APPLICATION means the inference profile was created by a user.

- id str
  - The provider-assigned unique ID for this managed resource.
- inference_profile_summaries Sequence[GetInferenceProfilesInferenceProfileSummary]
  - List of inference profile summary objects. See inference_profile_summaries.
- region str
- type str
  - Type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock. APPLICATION means the inference profile was created by a user.

- id String
  - The provider-assigned unique ID for this managed resource.
- inferenceProfileSummaries List&lt;Property Map&gt;
  - List of inference profile summary objects. See inference_profile_summaries.
- region String
- type String
  - Type of the inference profile. SYSTEM_DEFINED means that the inference profile is defined by Amazon Bedrock. APPLICATION means the inference profile was created by a user.
Supporting Types
GetInferenceProfilesInferenceProfileSummary
- CreatedAt string
  - Time at which the inference profile was created.
- Description string
  - Description of the inference profile.
- InferenceProfileArn string
  - Amazon Resource Name (ARN) of the inference profile.
- InferenceProfileId string
  - Unique identifier of the inference profile.
- InferenceProfileName string
  - Name of the inference profile.
- Models List&lt;GetInferenceProfilesInferenceProfileSummaryModel&gt;
  - List of information about each model in the inference profile. See models Block.
- Status string
  - Status of the inference profile. ACTIVE means that the inference profile is available to use.
- Type string
  - Filters for inference profiles that match the type you specify. Valid values are: SYSTEM_DEFINED, APPLICATION.
- UpdatedAt string
  - Time at which the inference profile was last updated.
- CreatedAt string
  - Time at which the inference profile was created.
- Description string
  - Description of the inference profile.
- InferenceProfileArn string
  - Amazon Resource Name (ARN) of the inference profile.
- InferenceProfileId string
  - Unique identifier of the inference profile.
- InferenceProfileName string
  - Name of the inference profile.
- Models []GetInferenceProfilesInferenceProfileSummaryModel
  - List of information about each model in the inference profile. See models Block.
- Status string
  - Status of the inference profile. ACTIVE means that the inference profile is available to use.
- Type string
  - Filters for inference profiles that match the type you specify. Valid values are: SYSTEM_DEFINED, APPLICATION.
- UpdatedAt string
  - Time at which the inference profile was last updated.
- createdAt String
  - Time at which the inference profile was created.
- description String
  - Description of the inference profile.
- inferenceProfileArn String
  - Amazon Resource Name (ARN) of the inference profile.
- inferenceProfileId String
  - Unique identifier of the inference profile.
- inferenceProfileName String
  - Name of the inference profile.
- models List&lt;GetInferenceProfilesInferenceProfileSummaryModel&gt;
  - List of information about each model in the inference profile. See models Block.
- status String
  - Status of the inference profile. ACTIVE means that the inference profile is available to use.
- type String
  - Filters for inference profiles that match the type you specify. Valid values are: SYSTEM_DEFINED, APPLICATION.
- updatedAt String
  - Time at which the inference profile was last updated.
- createdAt string
  - Time at which the inference profile was created.
- description string
  - Description of the inference profile.
- inferenceProfileArn string
  - Amazon Resource Name (ARN) of the inference profile.
- inferenceProfileId string
  - Unique identifier of the inference profile.
- inferenceProfileName string
  - Name of the inference profile.
- models GetInferenceProfilesInferenceProfileSummaryModel[]
  - List of information about each model in the inference profile. See models Block.
- status string
  - Status of the inference profile. ACTIVE means that the inference profile is available to use.
- type string
  - Filters for inference profiles that match the type you specify. Valid values are: SYSTEM_DEFINED, APPLICATION.
- updatedAt string
  - Time at which the inference profile was last updated.
- created_at str
  - Time at which the inference profile was created.
- description str
  - Description of the inference profile.
- inference_profile_arn str
  - Amazon Resource Name (ARN) of the inference profile.
- inference_profile_id str
  - Unique identifier of the inference profile.
- inference_profile_name str
  - Name of the inference profile.
- models Sequence[GetInferenceProfilesInferenceProfileSummaryModel]
  - List of information about each model in the inference profile. See models Block.
- status str
  - Status of the inference profile. ACTIVE means that the inference profile is available to use.
- type str
  - Filters for inference profiles that match the type you specify. Valid values are: SYSTEM_DEFINED, APPLICATION.
- updated_at str
  - Time at which the inference profile was last updated.
- createdAt String
  - Time at which the inference profile was created.
- description String
  - Description of the inference profile.
- inferenceProfileArn String
  - Amazon Resource Name (ARN) of the inference profile.
- inferenceProfileId String
  - Unique identifier of the inference profile.
- inferenceProfileName String
  - Name of the inference profile.
- models List&lt;Property Map&gt;
  - List of information about each model in the inference profile. See models Block.
- status String
  - Status of the inference profile. ACTIVE means that the inference profile is available to use.
- type String
  - Filters for inference profiles that match the type you specify. Valid values are: SYSTEM_DEFINED, APPLICATION.
- updatedAt String
  - Time at which the inference profile was last updated.
GetInferenceProfilesInferenceProfileSummaryModel
- ModelArn string
  - Amazon Resource Name (ARN) of the model.

- ModelArn string
  - Amazon Resource Name (ARN) of the model.

- modelArn String
  - Amazon Resource Name (ARN) of the model.

- modelArn string
  - Amazon Resource Name (ARN) of the model.

- model_arn str
  - Amazon Resource Name (ARN) of the model.

- modelArn String
  - Amazon Resource Name (ARN) of the model.
Package Details
- Repository
- AWS Classic pulumi/pulumi-aws
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the aws Terraform Provider.
Viewing docs for AWS v7.22.0
published on Wednesday, Mar 11, 2026 by Pulumi
