# specification/cognitiveservices/OpenAI.Inference/tspconfig.yaml
# TypeSpec compiler configuration for the Azure OpenAI inference data-plane spec.
# Declares template parameters, the default emitter, linter ruleset, and
# per-emitter (autorest / C# / Java / TS) code-generation options.
parameters:
  "service-dir":
    default: "sdk/openai"
  "dependencies":
    default: ""

# Emitters run by default when compiling this spec.
emit:
  - "@azure-tools/typespec-autorest"

linter:
  extends:
    - "@azure-tools/typespec-azure-rulesets/data-plane"

options:
  "@azure-tools/typespec-autorest":
    emitter-output-dir: "{project-root}/../"
    output-file: "{azure-resource-provider-folder}/AzureOpenAI/inference/{version-status}/{version}/generated.json"
    azure-resource-provider-folder: "data-plane"
    emit-lro-options: "none"
    omit-unreachable-types: true
  "@azure-tools/typespec-csharp":
    package-dir: "Azure.AI.OpenAI"
    namespace: "Azure.AI.OpenAI"
    clear-output-folder: true
    model-namespace: false
    generate-protocol-methods: false
    flavor: azure
  "@azure-tools/typespec-java":
    package-dir: "azure-ai-openai"
    namespace: "com.azure.ai.openai"
    partial-update: true
    enable-sync-stack: true
    generate-tests: false
    generate-samples: false
    custom-types-subpackage: "implementation.models"
    custom-types: "FunctionCallPreset,FileListResponse,OpenAIPageableListOfBatch"
    customization-class: "customization/src/main/java/OpenAICustomizations.java"
    flavor: azure
  "@azure-tools/typespec-ts":
    package-dir: "openai"
    generateMetadata: false
    generateTest: false
    isModularLibrary: true
    packageDetails:
      name: "@azure/openai"
    flavor: azure